Dataset viewer schema (field name, value type, min/max length):

| Field | Type | Min | Max |
|---|---|---|---|
| sha | stringlengths | 40 | 40 |
| text | stringlengths | 1 | 13.4M |
| id | stringlengths | 2 | 117 |
| tags | sequencelengths | 1 | 7.91k |
| created_at | stringlengths | 25 | 25 |
| metadata | stringlengths | 2 | 875k |
| last_modified | stringlengths | 25 | 25 |
| arxiv | sequencelengths | 0 | 25 |
| languages | sequencelengths | 0 | 7.91k |
| tags_str | stringlengths | 17 | 159k |
| text_str | stringlengths | 1 | 447k |
| text_lists | sequencelengths | 0 | 352 |
| processed_texts | sequencelengths | 1 | 353 |
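Read as a table, the schema above implies the following row shape. The sketch below is purely illustrative: the field names and length bounds come from the schema, while the concrete Python types (and the element types of the sequence fields) are assumptions, not something the schema states.

```python
from typing import TypedDict

class DatasetCardRow(TypedDict):
    # stringlengths 40-40: a 40-character hash (e.g. a git SHA)
    sha: str
    # stringlengths 1-13.4M: the raw dataset card text
    text: str
    # stringlengths 2-117: the dataset repository id
    id: str
    # sequencelengths 1-7.91k (element type assumed to be str)
    tags: list[str]
    # stringlengths 25-25: timestamps with UTC offset
    created_at: str
    last_modified: str
    # stringlengths 2-875k: JSON-encoded card metadata
    metadata: str
    # sequencelengths 0-25 / 0-7.91k (element types assumed)
    arxiv: list[str]
    languages: list[str]
    # stringlengths 17-159k / 1-447k: stringified variants of tags/text
    tags_str: str
    text_str: str
    # sequencelengths 0-352 / 1-353 (element type assumed)
    text_lists: list[str]
    processed_texts: list[str]
```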
ee9da05819ce2568b67cde47be8f4d2e2e42aee4
# Dataset Card for Evaluation run of alnrg2arg/blockchainlabs_test3_seminar

<!-- Provide a quick summary of the dataset. -->

Dataset automatically created during the evaluation run of model [alnrg2arg/blockchainlabs_test3_seminar](https://huggingface.co/alnrg2arg/blockchainlabs_test3_seminar) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:

```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_alnrg2arg__blockchainlabs_test3_seminar",
    "harness_winogrande_5",
    split="train")
```

## Latest results

These are the [latest results from run 2024-02-02T04:30:24.941518](https://huggingface.co/datasets/open-llm-leaderboard/details_alnrg2arg__blockchainlabs_test3_seminar/blob/main/results_2024-02-02T04-30-24.941518.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each of them in the results and the "latest" split for each eval):

```python
{
    "all": {
        "acc": 0.6526679268733767,
        "acc_stderr": 0.03208915302774204,
        "acc_norm": 0.6516694604557469,
        "acc_norm_stderr": 0.032768893712299095,
        "mc1": 0.5716034271725826,
        "mc1_stderr": 0.017323088597314743,
        "mc2": 0.7247121699417279,
        "mc2_stderr": 0.01469874984195087
    },
    "harness|arc:challenge|25": { "acc": 0.7039249146757679, "acc_stderr": 0.013340916085246256, "acc_norm": 0.7218430034129693, "acc_norm_stderr": 0.013094469919538809 },
    "harness|hellaswag|10": { "acc": 0.711611232822147, "acc_stderr": 0.004520870679457038, "acc_norm": 0.8893646683927504, "acc_norm_stderr": 0.0031303894668331987 },
    "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.35, "acc_stderr": 0.0479372485441102, "acc_norm": 0.35, "acc_norm_stderr": 0.0479372485441102 },
    "harness|hendrycksTest-anatomy|5": { "acc": 0.6518518518518519, "acc_stderr": 0.041153246103369526, "acc_norm": 0.6518518518518519, "acc_norm_stderr": 0.041153246103369526 },
    "harness|hendrycksTest-astronomy|5": { "acc": 0.7105263157894737, "acc_stderr": 0.03690677986137283, "acc_norm": 0.7105263157894737, "acc_norm_stderr": 0.03690677986137283 },
    "harness|hendrycksTest-business_ethics|5": { "acc": 0.64, "acc_stderr": 0.04824181513244218, "acc_norm": 0.64, "acc_norm_stderr": 0.04824181513244218 },
    "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7056603773584905, "acc_stderr": 0.028049186315695255, "acc_norm": 0.7056603773584905, "acc_norm_stderr": 0.028049186315695255 },
    "harness|hendrycksTest-college_biology|5": { "acc": 0.7708333333333334, "acc_stderr": 0.03514697467862388, "acc_norm": 0.7708333333333334, "acc_norm_stderr": 0.03514697467862388 },
    "harness|hendrycksTest-college_chemistry|5": { "acc": 0.46, "acc_stderr": 0.05009082659620333, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620333 },
    "harness|hendrycksTest-college_computer_science|5": { "acc": 0.52, "acc_stderr": 0.050211673156867795, "acc_norm": 0.52, "acc_norm_stderr": 0.050211673156867795 },
    "harness|hendrycksTest-college_mathematics|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 },
    "harness|hendrycksTest-college_medicine|5": { "acc": 0.6820809248554913, "acc_stderr": 0.0355068398916558, "acc_norm": 0.6820809248554913, "acc_norm_stderr": 0.0355068398916558 },
    "harness|hendrycksTest-college_physics|5": { "acc": 0.4117647058823529, "acc_stderr": 0.048971049527263666, "acc_norm": 0.4117647058823529, "acc_norm_stderr": 0.048971049527263666 },
    "harness|hendrycksTest-computer_security|5": { "acc": 0.76, "acc_stderr": 0.04292346959909283, "acc_norm": 0.76, "acc_norm_stderr": 0.04292346959909283 },
    "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.574468085106383, "acc_stderr": 0.03232146916224468, "acc_norm": 0.574468085106383, "acc_norm_stderr": 0.03232146916224468 },
    "harness|hendrycksTest-econometrics|5": { "acc": 0.5, "acc_stderr": 0.047036043419179864, "acc_norm": 0.5, "acc_norm_stderr": 0.047036043419179864 },
    "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5448275862068965, "acc_stderr": 0.04149886942192117, "acc_norm": 0.5448275862068965, "acc_norm_stderr": 0.04149886942192117 },
    "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.43386243386243384, "acc_stderr": 0.025525034382474887, "acc_norm": 0.43386243386243384, "acc_norm_stderr": 0.025525034382474887 },
    "harness|hendrycksTest-formal_logic|5": { "acc": 0.46825396825396826, "acc_stderr": 0.04463112720677171, "acc_norm": 0.46825396825396826, "acc_norm_stderr": 0.04463112720677171 },
    "harness|hendrycksTest-global_facts|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 },
    "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7774193548387097, "acc_stderr": 0.023664216671642518, "acc_norm": 0.7774193548387097, "acc_norm_stderr": 0.023664216671642518 },
    "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5073891625615764, "acc_stderr": 0.035176035403610105, "acc_norm": 0.5073891625615764, "acc_norm_stderr": 0.035176035403610105 },
    "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.68, "acc_stderr": 0.04688261722621505, "acc_norm": 0.68, "acc_norm_stderr": 0.04688261722621505 },
    "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7818181818181819, "acc_stderr": 0.03225078108306289, "acc_norm": 0.7818181818181819, "acc_norm_stderr": 0.03225078108306289 },
    "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8181818181818182, "acc_stderr": 0.0274796030105388, "acc_norm": 0.8181818181818182, "acc_norm_stderr": 0.0274796030105388 },
    "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8963730569948186, "acc_stderr": 0.02199531196364424, "acc_norm": 0.8963730569948186, "acc_norm_stderr": 0.02199531196364424 },
    "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6564102564102564, "acc_stderr": 0.024078696580635477, "acc_norm": 0.6564102564102564, "acc_norm_stderr": 0.024078696580635477 },
    "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.34814814814814815, "acc_stderr": 0.029045600290616255, "acc_norm": 0.34814814814814815, "acc_norm_stderr": 0.029045600290616255 },
    "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6764705882352942, "acc_stderr": 0.030388353551886793, "acc_norm": 0.6764705882352942, "acc_norm_stderr": 0.030388353551886793 },
    "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3708609271523179, "acc_stderr": 0.03943966699183629, "acc_norm": 0.3708609271523179, "acc_norm_stderr": 0.03943966699183629 },
    "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8458715596330275, "acc_stderr": 0.015480826865374307, "acc_norm": 0.8458715596330275, "acc_norm_stderr": 0.015480826865374307 },
    "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.49537037037037035, "acc_stderr": 0.03409825519163572, "acc_norm": 0.49537037037037035, "acc_norm_stderr": 0.03409825519163572 },
    "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8382352941176471, "acc_stderr": 0.02584501798692692, "acc_norm": 0.8382352941176471, "acc_norm_stderr": 0.02584501798692692 },
    "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8016877637130801, "acc_stderr": 0.02595502084162113, "acc_norm": 0.8016877637130801, "acc_norm_stderr": 0.02595502084162113 },
    "harness|hendrycksTest-human_aging|5": { "acc": 0.6816143497757847, "acc_stderr": 0.03126580522513713, "acc_norm": 0.6816143497757847, "acc_norm_stderr": 0.03126580522513713 },
    "harness|hendrycksTest-human_sexuality|5": { "acc": 0.8015267175572519, "acc_stderr": 0.034981493854624714, "acc_norm": 0.8015267175572519, "acc_norm_stderr": 0.034981493854624714 },
    "harness|hendrycksTest-international_law|5": { "acc": 0.7851239669421488, "acc_stderr": 0.037494924487096966, "acc_norm": 0.7851239669421488, "acc_norm_stderr": 0.037494924487096966 },
    "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7407407407407407, "acc_stderr": 0.04236511258094632, "acc_norm": 0.7407407407407407, "acc_norm_stderr": 0.04236511258094632 },
    "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7607361963190185, "acc_stderr": 0.0335195387952127, "acc_norm": 0.7607361963190185, "acc_norm_stderr": 0.0335195387952127 },
    "harness|hendrycksTest-machine_learning|5": { "acc": 0.45535714285714285, "acc_stderr": 0.047268355537191, "acc_norm": 0.45535714285714285, "acc_norm_stderr": 0.047268355537191 },
    "harness|hendrycksTest-management|5": { "acc": 0.7669902912621359, "acc_stderr": 0.04185832598928315, "acc_norm": 0.7669902912621359, "acc_norm_stderr": 0.04185832598928315 },
    "harness|hendrycksTest-marketing|5": { "acc": 0.8846153846153846, "acc_stderr": 0.02093019318517933, "acc_norm": 0.8846153846153846, "acc_norm_stderr": 0.02093019318517933 },
    "harness|hendrycksTest-medical_genetics|5": { "acc": 0.72, "acc_stderr": 0.04512608598542128, "acc_norm": 0.72, "acc_norm_stderr": 0.04512608598542128 },
    "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8237547892720306, "acc_stderr": 0.013625556907993466, "acc_norm": 0.8237547892720306, "acc_norm_stderr": 0.013625556907993466 },
    "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7341040462427746, "acc_stderr": 0.023786203255508297, "acc_norm": 0.7341040462427746, "acc_norm_stderr": 0.023786203255508297 },
    "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4223463687150838, "acc_stderr": 0.016519594275297117, "acc_norm": 0.4223463687150838, "acc_norm_stderr": 0.016519594275297117 },
    "harness|hendrycksTest-nutrition|5": { "acc": 0.7189542483660131, "acc_stderr": 0.02573885479781873, "acc_norm": 0.7189542483660131, "acc_norm_stderr": 0.02573885479781873 },
    "harness|hendrycksTest-philosophy|5": { "acc": 0.7106109324758842, "acc_stderr": 0.025755865922632945, "acc_norm": 0.7106109324758842, "acc_norm_stderr": 0.025755865922632945 },
    "harness|hendrycksTest-prehistory|5": { "acc": 0.7469135802469136, "acc_stderr": 0.024191808600712992, "acc_norm": 0.7469135802469136, "acc_norm_stderr": 0.024191808600712992 },
    "harness|hendrycksTest-professional_accounting|5": { "acc": 0.4716312056737589, "acc_stderr": 0.02977945095730307, "acc_norm": 0.4716312056737589, "acc_norm_stderr": 0.02977945095730307 },
    "harness|hendrycksTest-professional_law|5": { "acc": 0.47131681877444587, "acc_stderr": 0.01274920600765747, "acc_norm": 0.47131681877444587, "acc_norm_stderr": 0.01274920600765747 },
    "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6764705882352942, "acc_stderr": 0.02841820861940676, "acc_norm": 0.6764705882352942, "acc_norm_stderr": 0.02841820861940676 },
    "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6683006535947712, "acc_stderr": 0.01904748523936038, "acc_norm": 0.6683006535947712, "acc_norm_stderr": 0.01904748523936038 },
    "harness|hendrycksTest-public_relations|5": { "acc": 0.6909090909090909, "acc_stderr": 0.044262946482000985, "acc_norm": 0.6909090909090909, "acc_norm_stderr": 0.044262946482000985 },
    "harness|hendrycksTest-security_studies|5": { "acc": 0.7306122448979592, "acc_stderr": 0.02840125202902294, "acc_norm": 0.7306122448979592, "acc_norm_stderr": 0.02840125202902294 },
    "harness|hendrycksTest-sociology|5": { "acc": 0.835820895522388, "acc_stderr": 0.026193923544454115, "acc_norm": 0.835820895522388, "acc_norm_stderr": 0.026193923544454115 },
    "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.85, "acc_stderr": 0.0358870281282637, "acc_norm": 0.85, "acc_norm_stderr": 0.0358870281282637 },
    "harness|hendrycksTest-virology|5": { "acc": 0.5542168674698795, "acc_stderr": 0.03869543323472101, "acc_norm": 0.5542168674698795, "acc_norm_stderr": 0.03869543323472101 },
    "harness|hendrycksTest-world_religions|5": { "acc": 0.8362573099415205, "acc_stderr": 0.028380919596145866, "acc_norm": 0.8362573099415205, "acc_norm_stderr": 0.028380919596145866 },
    "harness|truthfulqa:mc|0": { "mc1": 0.5716034271725826, "mc1_stderr": 0.017323088597314743, "mc2": 0.7247121699417279, "mc2_stderr": 0.01469874984195087 },
    "harness|winogrande|5": { "acc": 0.856353591160221, "acc_stderr": 0.009857280052696737 },
    "harness|gsm8k|5": { "acc": 0.7035633055344959, "acc_stderr": 0.012579398235589534 }
}
```

## Dataset Details

### Dataset Description

<!-- Provide a longer summary of what this dataset is. -->

- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]

### Dataset Sources [optional]

<!-- Provide the basic links for the dataset. -->

- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]

## Uses

<!-- Address questions around how the dataset is intended to be used. -->

### Direct Use

<!-- This section describes suitable use cases for the dataset. -->

[More Information Needed]

### Out-of-Scope Use

<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->

[More Information Needed]

## Dataset Structure

<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->

[More Information Needed]

## Dataset Creation

### Curation Rationale

<!-- Motivation for the creation of this dataset. -->

[More Information Needed]

### Source Data

<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->

#### Data Collection and Processing

<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->

[More Information Needed]

#### Who are the source data producers?

<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->

[More Information Needed]

### Annotations [optional]

<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->

#### Annotation process

<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->

[More Information Needed]

#### Who are the annotators?

<!-- This section describes the people or systems who created the annotations. -->

[More Information Needed]

#### Personal and Sensitive Information

<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->

[More Information Needed]

## Bias, Risks, and Limitations

<!-- This section is meant to convey both technical and sociotechnical limitations. -->

[More Information Needed]

### Recommendations

<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->

Users should be made aware of the risks, biases, and limitations of the dataset. More information needed for further recommendations.

## Citation [optional]

<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->

**BibTeX:**

[More Information Needed]

**APA:**

[More Information Needed]

## Glossary [optional]

<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->

[More Information Needed]

## More Information [optional]

[More Information Needed]

## Dataset Card Authors [optional]

[More Information Needed]

## Dataset Card Contact

[More Information Needed]
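As a complement to the loading snippet earlier in this card, here is a minimal sketch of pulling both the aggregated metrics and a specific timestamped run. It relies only on configuration and split names that appear in this card's metadata ("results", "latest", and the 2024_02_02T04_30_24.941518 timestamp); whether you want the "latest" alias or the pinned timestamp depends on whether you need reproducibility across re-runs.

```python
from datasets import load_dataset

REPO = "open-llm-leaderboard/details_alnrg2arg__blockchainlabs_test3_seminar"

# Aggregated metrics for the run (the "results" configuration described above);
# the "latest" split always points at the most recent evaluation.
results = load_dataset(REPO, "results", split="latest")

# Details for one task pinned to a fixed point in time, using the
# timestamped split name of this run.
winogrande_run = load_dataset(
    REPO,
    "harness_winogrande_5",
    split="2024_02_02T04_30_24.941518",
)

print(results)
print(winogrande_run)
```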
open-llm-leaderboard/details_alnrg2arg__blockchainlabs_test3_seminar
[ "region:us" ]
2024-02-02T04:32:47+00:00
{"pretty_name": "Evaluation run of alnrg2arg/blockchainlabs_test3_seminar", "dataset_summary": "Dataset automatically created during the evaluation run of model [alnrg2arg/blockchainlabs_test3_seminar](https://huggingface.co/alnrg2arg/blockchainlabs_test3_seminar) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_alnrg2arg__blockchainlabs_test3_seminar\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-02T04:30:24.941518](https://huggingface.co/datasets/open-llm-leaderboard/details_alnrg2arg__blockchainlabs_test3_seminar/blob/main/results_2024-02-02T04-30-24.941518.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6526679268733767,\n \"acc_stderr\": 0.03208915302774204,\n \"acc_norm\": 0.6516694604557469,\n \"acc_norm_stderr\": 0.032768893712299095,\n \"mc1\": 0.5716034271725826,\n \"mc1_stderr\": 0.017323088597314743,\n \"mc2\": 0.7247121699417279,\n \"mc2_stderr\": 0.01469874984195087\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.7039249146757679,\n \"acc_stderr\": 0.013340916085246256,\n \"acc_norm\": 0.7218430034129693,\n \"acc_norm_stderr\": 0.013094469919538809\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.711611232822147,\n \"acc_stderr\": 0.004520870679457038,\n \"acc_norm\": 0.8893646683927504,\n \"acc_norm_stderr\": 0.0031303894668331987\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.0479372485441102,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6518518518518519,\n \"acc_stderr\": 0.041153246103369526,\n \"acc_norm\": 0.6518518518518519,\n \"acc_norm_stderr\": 0.041153246103369526\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.7105263157894737,\n \"acc_stderr\": 0.03690677986137283,\n \"acc_norm\": 0.7105263157894737,\n \"acc_norm_stderr\": 0.03690677986137283\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.64,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.64,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7056603773584905,\n \"acc_stderr\": 0.028049186315695255,\n \"acc_norm\": 0.7056603773584905,\n \"acc_norm_stderr\": 0.028049186315695255\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7708333333333334,\n \"acc_stderr\": 0.03514697467862388,\n \"acc_norm\": 0.7708333333333334,\n \"acc_norm_stderr\": 0.03514697467862388\n },\n 
\"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.46,\n \"acc_stderr\": 0.05009082659620333,\n \"acc_norm\": 0.46,\n \"acc_norm_stderr\": 0.05009082659620333\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.52,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.52,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6820809248554913,\n \"acc_stderr\": 0.0355068398916558,\n \"acc_norm\": 0.6820809248554913,\n \"acc_norm_stderr\": 0.0355068398916558\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4117647058823529,\n \"acc_stderr\": 0.048971049527263666,\n \"acc_norm\": 0.4117647058823529,\n \"acc_norm_stderr\": 0.048971049527263666\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.76,\n \"acc_stderr\": 0.04292346959909283,\n \"acc_norm\": 0.76,\n \"acc_norm_stderr\": 0.04292346959909283\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.574468085106383,\n \"acc_stderr\": 0.03232146916224468,\n \"acc_norm\": 0.574468085106383,\n \"acc_norm_stderr\": 0.03232146916224468\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.047036043419179864,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.047036043419179864\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5448275862068965,\n \"acc_stderr\": 0.04149886942192117,\n \"acc_norm\": 0.5448275862068965,\n \"acc_norm_stderr\": 0.04149886942192117\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.43386243386243384,\n \"acc_stderr\": 0.025525034382474887,\n \"acc_norm\": 0.43386243386243384,\n \"acc_norm_stderr\": 0.025525034382474887\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.46825396825396826,\n \"acc_stderr\": 0.04463112720677171,\n \"acc_norm\": 0.46825396825396826,\n \"acc_norm_stderr\": 0.04463112720677171\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7774193548387097,\n \"acc_stderr\": 0.023664216671642518,\n \"acc_norm\": 0.7774193548387097,\n \"acc_norm_stderr\": 0.023664216671642518\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5073891625615764,\n \"acc_stderr\": 0.035176035403610105,\n \"acc_norm\": 0.5073891625615764,\n \"acc_norm_stderr\": 0.035176035403610105\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.68,\n \"acc_stderr\": 0.04688261722621505,\n \"acc_norm\": 0.68,\n \"acc_norm_stderr\": 0.04688261722621505\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7818181818181819,\n \"acc_stderr\": 0.03225078108306289,\n \"acc_norm\": 0.7818181818181819,\n \"acc_norm_stderr\": 0.03225078108306289\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8181818181818182,\n \"acc_stderr\": 0.0274796030105388,\n \"acc_norm\": 0.8181818181818182,\n \"acc_norm_stderr\": 0.0274796030105388\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8963730569948186,\n \"acc_stderr\": 0.02199531196364424,\n \"acc_norm\": 0.8963730569948186,\n \"acc_norm_stderr\": 0.02199531196364424\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6564102564102564,\n \"acc_stderr\": 0.024078696580635477,\n \"acc_norm\": 0.6564102564102564,\n \"acc_norm_stderr\": 0.024078696580635477\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.34814814814814815,\n \"acc_stderr\": 0.029045600290616255,\n \"acc_norm\": 0.34814814814814815,\n \"acc_norm_stderr\": 0.029045600290616255\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6764705882352942,\n \"acc_stderr\": 0.030388353551886793,\n \"acc_norm\": 0.6764705882352942,\n \"acc_norm_stderr\": 0.030388353551886793\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3708609271523179,\n \"acc_stderr\": 0.03943966699183629,\n \"acc_norm\": 0.3708609271523179,\n \"acc_norm_stderr\": 0.03943966699183629\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8458715596330275,\n \"acc_stderr\": 0.015480826865374307,\n \"acc_norm\": 0.8458715596330275,\n \"acc_norm_stderr\": 0.015480826865374307\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.49537037037037035,\n \"acc_stderr\": 0.03409825519163572,\n \"acc_norm\": 0.49537037037037035,\n \"acc_norm_stderr\": 0.03409825519163572\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8382352941176471,\n \"acc_stderr\": 0.02584501798692692,\n \"acc_norm\": 0.8382352941176471,\n \"acc_norm_stderr\": 0.02584501798692692\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8016877637130801,\n \"acc_stderr\": 0.02595502084162113,\n \"acc_norm\": 0.8016877637130801,\n \"acc_norm_stderr\": 0.02595502084162113\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6816143497757847,\n \"acc_stderr\": 0.03126580522513713,\n \"acc_norm\": 0.6816143497757847,\n \"acc_norm_stderr\": 0.03126580522513713\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.8015267175572519,\n \"acc_stderr\": 0.034981493854624714,\n \"acc_norm\": 0.8015267175572519,\n \"acc_norm_stderr\": 0.034981493854624714\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7851239669421488,\n \"acc_stderr\": 0.037494924487096966,\n \"acc_norm\": 0.7851239669421488,\n \"acc_norm_stderr\": 0.037494924487096966\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7407407407407407,\n \"acc_stderr\": 0.04236511258094632,\n \"acc_norm\": 0.7407407407407407,\n \"acc_norm_stderr\": 0.04236511258094632\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7607361963190185,\n \"acc_stderr\": 0.0335195387952127,\n \"acc_norm\": 0.7607361963190185,\n \"acc_norm_stderr\": 0.0335195387952127\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.45535714285714285,\n \"acc_stderr\": 0.047268355537191,\n \"acc_norm\": 0.45535714285714285,\n \"acc_norm_stderr\": 0.047268355537191\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7669902912621359,\n \"acc_stderr\": 0.04185832598928315,\n \"acc_norm\": 0.7669902912621359,\n \"acc_norm_stderr\": 0.04185832598928315\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8846153846153846,\n \"acc_stderr\": 0.02093019318517933,\n \"acc_norm\": 0.8846153846153846,\n \"acc_norm_stderr\": 0.02093019318517933\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.04512608598542128,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.04512608598542128\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.8237547892720306,\n \"acc_stderr\": 0.013625556907993466,\n \"acc_norm\": 0.8237547892720306,\n \"acc_norm_stderr\": 0.013625556907993466\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7341040462427746,\n \"acc_stderr\": 0.023786203255508297,\n \"acc_norm\": 0.7341040462427746,\n \"acc_norm_stderr\": 0.023786203255508297\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4223463687150838,\n \"acc_stderr\": 0.016519594275297117,\n \"acc_norm\": 0.4223463687150838,\n \"acc_norm_stderr\": 0.016519594275297117\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7189542483660131,\n \"acc_stderr\": 0.02573885479781873,\n \"acc_norm\": 0.7189542483660131,\n \"acc_norm_stderr\": 0.02573885479781873\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7106109324758842,\n \"acc_stderr\": 0.025755865922632945,\n \"acc_norm\": 0.7106109324758842,\n \"acc_norm_stderr\": 0.025755865922632945\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7469135802469136,\n \"acc_stderr\": 0.024191808600712992,\n \"acc_norm\": 0.7469135802469136,\n \"acc_norm_stderr\": 0.024191808600712992\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4716312056737589,\n \"acc_stderr\": 0.02977945095730307,\n \"acc_norm\": 0.4716312056737589,\n \"acc_norm_stderr\": 0.02977945095730307\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.47131681877444587,\n \"acc_stderr\": 0.01274920600765747,\n \"acc_norm\": 0.47131681877444587,\n \"acc_norm_stderr\": 0.01274920600765747\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6764705882352942,\n \"acc_stderr\": 0.02841820861940676,\n \"acc_norm\": 0.6764705882352942,\n \"acc_norm_stderr\": 0.02841820861940676\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6683006535947712,\n \"acc_stderr\": 0.01904748523936038,\n \"acc_norm\": 0.6683006535947712,\n \"acc_norm_stderr\": 0.01904748523936038\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6909090909090909,\n \"acc_stderr\": 0.044262946482000985,\n \"acc_norm\": 0.6909090909090909,\n \"acc_norm_stderr\": 0.044262946482000985\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7306122448979592,\n \"acc_stderr\": 0.02840125202902294,\n \"acc_norm\": 0.7306122448979592,\n \"acc_norm_stderr\": 0.02840125202902294\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.835820895522388,\n \"acc_stderr\": 0.026193923544454115,\n \"acc_norm\": 0.835820895522388,\n \"acc_norm_stderr\": 0.026193923544454115\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.85,\n \"acc_stderr\": 0.0358870281282637,\n \"acc_norm\": 0.85,\n \"acc_norm_stderr\": 0.0358870281282637\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5542168674698795,\n \"acc_stderr\": 0.03869543323472101,\n \"acc_norm\": 0.5542168674698795,\n \"acc_norm_stderr\": 0.03869543323472101\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8362573099415205,\n \"acc_stderr\": 0.028380919596145866,\n \"acc_norm\": 0.8362573099415205,\n \"acc_norm_stderr\": 0.028380919596145866\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5716034271725826,\n \"mc1_stderr\": 0.017323088597314743,\n \"mc2\": 0.7247121699417279,\n \"mc2_stderr\": 0.01469874984195087\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.856353591160221,\n \"acc_stderr\": 0.009857280052696737\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.7035633055344959,\n \"acc_stderr\": 0.012579398235589534\n 
}\n}\n```", "repo_url": "https://huggingface.co/alnrg2arg/blockchainlabs_test3_seminar", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_02T04_30_24.941518", "path": ["**/details_harness|arc:challenge|25_2024-02-02T04-30-24.941518.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-02T04-30-24.941518.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_02T04_30_24.941518", "path": ["**/details_harness|gsm8k|5_2024-02-02T04-30-24.941518.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-02T04-30-24.941518.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_02T04_30_24.941518", "path": ["**/details_harness|hellaswag|10_2024-02-02T04-30-24.941518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-02T04-30-24.941518.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_02T04_30_24.941518", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T04-30-24.941518.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T04-30-24.941518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T04-30-24.941518.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T04-30-24.941518.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T04-30-24.941518.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T04-30-24.941518.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_02T04_30_24.941518", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T04-30-24.941518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T04-30-24.941518.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_02T04_30_24.941518", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T04-30-24.941518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T04-30-24.941518.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_02T04_30_24.941518", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T04-30-24.941518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T04-30-24.941518.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_02T04_30_24.941518", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T04-30-24.941518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T04-30-24.941518.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_02T04_30_24.941518", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T04-30-24.941518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T04-30-24.941518.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_02T04_30_24.941518", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T04-30-24.941518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T04-30-24.941518.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_02T04_30_24.941518", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T04-30-24.941518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T04-30-24.941518.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_02_02T04_30_24.941518", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T04-30-24.941518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T04-30-24.941518.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_02T04_30_24.941518", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T04-30-24.941518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T04-30-24.941518.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_02T04_30_24.941518", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T04-30-24.941518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T04-30-24.941518.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_02T04_30_24.941518", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T04-30-24.941518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T04-30-24.941518.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_02T04_30_24.941518", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T04-30-24.941518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T04-30-24.941518.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_02T04_30_24.941518", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T04-30-24.941518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T04-30-24.941518.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_02T04_30_24.941518", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T04-30-24.941518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T04-30-24.941518.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_02T04_30_24.941518", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T04-30-24.941518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T04-30-24.941518.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_02T04_30_24.941518", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T04-30-24.941518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T04-30-24.941518.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_02T04_30_24.941518", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T04-30-24.941518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T04-30-24.941518.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_02T04_30_24.941518", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T04-30-24.941518.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T04-30-24.941518.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_02T04_30_24.941518", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T04-30-24.941518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T04-30-24.941518.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_02T04_30_24.941518", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T04-30-24.941518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T04-30-24.941518.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_02T04_30_24.941518", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T04-30-24.941518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T04-30-24.941518.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_02T04_30_24.941518", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T04-30-24.941518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T04-30-24.941518.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_02T04_30_24.941518", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T04-30-24.941518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T04-30-24.941518.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_02T04_30_24.941518", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T04-30-24.941518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T04-30-24.941518.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_02T04_30_24.941518", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T04-30-24.941518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T04-30-24.941518.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_02T04_30_24.941518", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T04-30-24.941518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T04-30-24.941518.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_02T04_30_24.941518", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T04-30-24.941518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T04-30-24.941518.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_02T04_30_24.941518", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T04-30-24.941518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T04-30-24.941518.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_02T04_30_24.941518", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T04-30-24.941518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T04-30-24.941518.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_02T04_30_24.941518", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T04-30-24.941518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T04-30-24.941518.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_02T04_30_24.941518", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T04-30-24.941518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T04-30-24.941518.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_02T04_30_24.941518", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T04-30-24.941518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T04-30-24.941518.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_02T04_30_24.941518", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T04-30-24.941518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T04-30-24.941518.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_02T04_30_24.941518", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T04-30-24.941518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T04-30-24.941518.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_02T04_30_24.941518", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T04-30-24.941518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T04-30-24.941518.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_02T04_30_24.941518", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T04-30-24.941518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T04-30-24.941518.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_02T04_30_24.941518", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T04-30-24.941518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T04-30-24.941518.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_02_02T04_30_24.941518", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T04-30-24.941518.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T04-30-24.941518.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_02T04_30_24.941518", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T04-30-24.941518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T04-30-24.941518.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_02T04_30_24.941518", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T04-30-24.941518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T04-30-24.941518.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_02T04_30_24.941518", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T04-30-24.941518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T04-30-24.941518.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_02T04_30_24.941518", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T04-30-24.941518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T04-30-24.941518.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_02T04_30_24.941518", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T04-30-24.941518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T04-30-24.941518.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_02T04_30_24.941518", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T04-30-24.941518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T04-30-24.941518.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_02T04_30_24.941518", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T04-30-24.941518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T04-30-24.941518.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_02T04_30_24.941518", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T04-30-24.941518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T04-30-24.941518.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_02T04_30_24.941518", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T04-30-24.941518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T04-30-24.941518.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_02T04_30_24.941518", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T04-30-24.941518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T04-30-24.941518.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_02T04_30_24.941518", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T04-30-24.941518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T04-30-24.941518.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_02T04_30_24.941518", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T04-30-24.941518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T04-30-24.941518.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_02T04_30_24.941518", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T04-30-24.941518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T04-30-24.941518.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_02T04_30_24.941518", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T04-30-24.941518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T04-30-24.941518.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_02T04_30_24.941518", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T04-30-24.941518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T04-30-24.941518.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_02T04_30_24.941518", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T04-30-24.941518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T04-30-24.941518.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_02T04_30_24.941518", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T04-30-24.941518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T04-30-24.941518.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_02T04_30_24.941518", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T04-30-24.941518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T04-30-24.941518.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_02T04_30_24.941518", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T04-30-24.941518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T04-30-24.941518.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_02T04_30_24.941518", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T04-30-24.941518.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T04-30-24.941518.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_02T04_30_24.941518", "path": ["**/details_harness|winogrande|5_2024-02-02T04-30-24.941518.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-02-02T04-30-24.941518.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_02_02T04_30_24.941518", "path": ["results_2024-02-02T04-30-24.941518.parquet"]}, {"split": "latest", "path": ["results_2024-02-02T04-30-24.941518.parquet"]}]}]}
2024-02-02T04:33:21+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of alnrg2arg/blockchainlabs_test3_seminar Dataset automatically created during the evaluation run of model alnrg2arg/blockchainlabs_test3_seminar on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-02-02T04:30:24.941518 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of alnrg2arg/blockchainlabs_test3_seminar\n\n\n\nDataset automatically created during the evaluation run of model alnrg2arg/blockchainlabs_test3_seminar on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T04:30:24.941518 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of alnrg2arg/blockchainlabs_test3_seminar\n\n\n\nDataset automatically created during the evaluation run of model alnrg2arg/blockchainlabs_test3_seminar on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T04:30:24.941518 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
65780730bdd722510b7b2deb37cec645e58ee34d
# Dataset Card for Evaluation run of jefferylovely/SuperThetaMaven <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [jefferylovely/SuperThetaMaven](https://huggingface.co/jefferylovely/SuperThetaMaven) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_jefferylovely__SuperThetaMaven", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-02-02T04:35:28.673518](https://huggingface.co/datasets/open-llm-leaderboard/details_jefferylovely__SuperThetaMaven/blob/main/results_2024-02-02T04-35-28.673518.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6545059772983542, "acc_stderr": 0.03205377283844669, "acc_norm": 0.6538200997878416, "acc_norm_stderr": 0.03272641534569135, "mc1": 0.5703794369645043, "mc1_stderr": 0.01732923458040909, "mc2": 0.7177387118634652, "mc2_stderr": 0.014774281827372924 }, "harness|arc:challenge|25": { "acc": 0.7107508532423208, "acc_stderr": 0.013250012579393441, "acc_norm": 0.7363481228668942, "acc_norm_stderr": 0.012875929151297042 }, "harness|hellaswag|10": { "acc": 0.7144991037641903, "acc_stderr": 0.004507296196227809, "acc_norm": 0.8899621589324835, "acc_norm_stderr": 0.0031229736320394727 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6666666666666666, "acc_stderr": 0.04072314811876837, "acc_norm": 0.6666666666666666, "acc_norm_stderr": 0.04072314811876837 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6907894736842105, "acc_stderr": 0.037610708698674805, "acc_norm": 0.6907894736842105, "acc_norm_stderr": 0.037610708698674805 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.64, "acc_stderr": 0.04824181513244218, "acc_norm": 0.64, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7094339622641509, "acc_stderr": 0.02794321998933714, "acc_norm": 0.7094339622641509, "acc_norm_stderr": 0.02794321998933714 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7638888888888888, "acc_stderr": 0.03551446610810826, "acc_norm": 0.7638888888888888, "acc_norm_stderr": 0.03551446610810826 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.47, "acc_stderr": 0.050161355804659205, "acc_norm": 0.47, "acc_norm_stderr": 0.050161355804659205 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.52, "acc_stderr": 0.050211673156867795, "acc_norm": 0.52, "acc_norm_stderr": 
0.050211673156867795 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6705202312138728, "acc_stderr": 0.03583901754736412, "acc_norm": 0.6705202312138728, "acc_norm_stderr": 0.03583901754736412 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4215686274509804, "acc_stderr": 0.04913595201274498, "acc_norm": 0.4215686274509804, "acc_norm_stderr": 0.04913595201274498 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.74, "acc_stderr": 0.04408440022768077, "acc_norm": 0.74, "acc_norm_stderr": 0.04408440022768077 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5787234042553191, "acc_stderr": 0.03227834510146267, "acc_norm": 0.5787234042553191, "acc_norm_stderr": 0.03227834510146267 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.4824561403508772, "acc_stderr": 0.04700708033551038, "acc_norm": 0.4824561403508772, "acc_norm_stderr": 0.04700708033551038 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5448275862068965, "acc_stderr": 0.04149886942192117, "acc_norm": 0.5448275862068965, "acc_norm_stderr": 0.04149886942192117 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.42328042328042326, "acc_stderr": 0.025446365634406783, "acc_norm": 0.42328042328042326, "acc_norm_stderr": 0.025446365634406783 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.4603174603174603, "acc_stderr": 0.04458029125470973, "acc_norm": 0.4603174603174603, "acc_norm_stderr": 0.04458029125470973 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.32, "acc_stderr": 0.04688261722621505, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621505 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7870967741935484, "acc_stderr": 0.023287665127268545, "acc_norm": 0.7870967741935484, "acc_norm_stderr": 0.023287665127268545 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5123152709359606, "acc_stderr": 0.035169204442208966, "acc_norm": 0.5123152709359606, "acc_norm_stderr": 0.035169204442208966 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.68, "acc_stderr": 0.04688261722621505, "acc_norm": 0.68, "acc_norm_stderr": 0.04688261722621505 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7818181818181819, "acc_stderr": 0.03225078108306289, "acc_norm": 0.7818181818181819, "acc_norm_stderr": 0.03225078108306289 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.803030303030303, "acc_stderr": 0.028335609732463362, "acc_norm": 0.803030303030303, "acc_norm_stderr": 0.028335609732463362 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8860103626943006, "acc_stderr": 0.022935144053919443, "acc_norm": 0.8860103626943006, "acc_norm_stderr": 0.022935144053919443 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6641025641025641, "acc_stderr": 0.023946724741563976, "acc_norm": 0.6641025641025641, "acc_norm_stderr": 0.023946724741563976 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.34444444444444444, "acc_stderr": 0.02897264888484427, "acc_norm": 0.34444444444444444, "acc_norm_stderr": 0.02897264888484427 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6764705882352942, "acc_stderr": 0.030388353551886797, "acc_norm": 0.6764705882352942, "acc_norm_stderr": 0.030388353551886797 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.37748344370860926, 
"acc_stderr": 0.03958027231121569, "acc_norm": 0.37748344370860926, "acc_norm_stderr": 0.03958027231121569 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8458715596330275, "acc_stderr": 0.015480826865374303, "acc_norm": 0.8458715596330275, "acc_norm_stderr": 0.015480826865374303 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5092592592592593, "acc_stderr": 0.034093869469927006, "acc_norm": 0.5092592592592593, "acc_norm_stderr": 0.034093869469927006 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8382352941176471, "acc_stderr": 0.02584501798692692, "acc_norm": 0.8382352941176471, "acc_norm_stderr": 0.02584501798692692 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8143459915611815, "acc_stderr": 0.025310495376944856, "acc_norm": 0.8143459915611815, "acc_norm_stderr": 0.025310495376944856 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6860986547085202, "acc_stderr": 0.031146796482972465, "acc_norm": 0.6860986547085202, "acc_norm_stderr": 0.031146796482972465 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7938931297709924, "acc_stderr": 0.03547771004159463, "acc_norm": 0.7938931297709924, "acc_norm_stderr": 0.03547771004159463 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7851239669421488, "acc_stderr": 0.037494924487096966, "acc_norm": 0.7851239669421488, "acc_norm_stderr": 0.037494924487096966 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7777777777777778, "acc_stderr": 0.0401910747255735, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.0401910747255735 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7730061349693251, "acc_stderr": 0.03291099578615769, "acc_norm": 0.7730061349693251, "acc_norm_stderr": 0.03291099578615769 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.45535714285714285, "acc_stderr": 0.047268355537191, "acc_norm": 0.45535714285714285, "acc_norm_stderr": 0.047268355537191 }, "harness|hendrycksTest-management|5": { "acc": 0.7766990291262136, "acc_stderr": 0.04123553189891431, "acc_norm": 0.7766990291262136, "acc_norm_stderr": 0.04123553189891431 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8846153846153846, "acc_stderr": 0.02093019318517933, "acc_norm": 0.8846153846153846, "acc_norm_stderr": 0.02093019318517933 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.74, "acc_stderr": 0.04408440022768078, "acc_norm": 0.74, "acc_norm_stderr": 0.04408440022768078 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8263090676883781, "acc_stderr": 0.013547415658662264, "acc_norm": 0.8263090676883781, "acc_norm_stderr": 0.013547415658662264 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7369942196531792, "acc_stderr": 0.023703099525258176, "acc_norm": 0.7369942196531792, "acc_norm_stderr": 0.023703099525258176 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.42905027932960893, "acc_stderr": 0.016553287863116037, "acc_norm": 0.42905027932960893, "acc_norm_stderr": 0.016553287863116037 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7124183006535948, "acc_stderr": 0.02591780611714716, "acc_norm": 0.7124183006535948, "acc_norm_stderr": 0.02591780611714716 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7202572347266881, "acc_stderr": 0.025494259350694912, "acc_norm": 0.7202572347266881, "acc_norm_stderr": 0.025494259350694912 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7592592592592593, "acc_stderr": 0.023788583551658533, "acc_norm": 0.7592592592592593, "acc_norm_stderr": 0.023788583551658533 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.48936170212765956, "acc_stderr": 0.029820747191422473, "acc_norm": 0.48936170212765956, "acc_norm_stderr": 0.029820747191422473 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.470013037809648, "acc_stderr": 0.012747248967079064, "acc_norm": 0.470013037809648, "acc_norm_stderr": 0.012747248967079064 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6801470588235294, "acc_stderr": 0.02833295951403121, "acc_norm": 0.6801470588235294, "acc_norm_stderr": 0.02833295951403121 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6715686274509803, "acc_stderr": 0.018999707383162673, "acc_norm": 0.6715686274509803, "acc_norm_stderr": 0.018999707383162673 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6909090909090909, "acc_stderr": 0.044262946482000985, "acc_norm": 0.6909090909090909, "acc_norm_stderr": 0.044262946482000985 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7306122448979592, "acc_stderr": 0.02840125202902294, "acc_norm": 0.7306122448979592, "acc_norm_stderr": 0.02840125202902294 }, "harness|hendrycksTest-sociology|5": { "acc": 0.835820895522388, "acc_stderr": 0.026193923544454115, "acc_norm": 0.835820895522388, "acc_norm_stderr": 0.026193923544454115 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.86, "acc_stderr": 0.0348735088019777, "acc_norm": 0.86, "acc_norm_stderr": 0.0348735088019777 }, "harness|hendrycksTest-virology|5": { "acc": 0.5662650602409639, "acc_stderr": 0.03858158940685516, "acc_norm": 0.5662650602409639, "acc_norm_stderr": 0.03858158940685516 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8245614035087719, "acc_stderr": 0.029170885500727665, "acc_norm": 0.8245614035087719, "acc_norm_stderr": 0.029170885500727665 }, "harness|truthfulqa:mc|0": { "mc1": 0.5703794369645043, "mc1_stderr": 0.01732923458040909, "mc2": 0.7177387118634652, "mc2_stderr": 0.014774281827372924 }, "harness|winogrande|5": { "acc": 0.8492501973164956, "acc_stderr": 0.010056094631479674 }, "harness|gsm8k|5": { "acc": 0.7012888551933283, "acc_stderr": 0.012607137125693633 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
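For completeness, here is a minimal sketch (an illustrative example reusing the configuration and split names declared in this dataset's own metadata, not an official snippet) of how the aggregated "results" configuration or a single timestamped run split can be loaded in the same way:

```python
from datasets import load_dataset

# Aggregated metrics for the run; the "latest" split is re-pointed to the
# most recent results every time a new evaluation run is added.
results = load_dataset(
    "open-llm-leaderboard/details_jefferylovely__SuperThetaMaven",
    "results",
    split="latest",
)

# Per-example details for one 5-shot MMLU subtask, addressed by the
# timestamped split of the run documented in this card.
physics = load_dataset(
    "open-llm-leaderboard/details_jefferylovely__SuperThetaMaven",
    "harness_hendrycksTest_high_school_physics_5",
    split="2024_02_02T04_35_28.673518",
)
```

Since this dataset was created from a single run, `split="latest"` and the timestamped split above currently return the same data.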
open-llm-leaderboard/details_jefferylovely__SuperThetaMaven
[ "region:us" ]
2024-02-02T04:37:49+00:00
{"pretty_name": "Evaluation run of jefferylovely/SuperThetaMaven", "dataset_summary": "Dataset automatically created during the evaluation run of model [jefferylovely/SuperThetaMaven](https://huggingface.co/jefferylovely/SuperThetaMaven) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_jefferylovely__SuperThetaMaven\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-02T04:35:28.673518](https://huggingface.co/datasets/open-llm-leaderboard/details_jefferylovely__SuperThetaMaven/blob/main/results_2024-02-02T04-35-28.673518.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n    \"all\": {\n        \"acc\": 0.6545059772983542,\n        \"acc_stderr\": 0.03205377283844669,\n        \"acc_norm\": 0.6538200997878416,\n        \"acc_norm_stderr\": 0.03272641534569135,\n        \"mc1\": 0.5703794369645043,\n        \"mc1_stderr\": 0.01732923458040909,\n        \"mc2\": 0.7177387118634652,\n        \"mc2_stderr\": 0.014774281827372924\n    },\n    \"harness|arc:challenge|25\": {\n        \"acc\": 0.7107508532423208,\n        \"acc_stderr\": 0.013250012579393441,\n        \"acc_norm\": 0.7363481228668942,\n        \"acc_norm_stderr\": 0.012875929151297042\n    },\n    \"harness|hellaswag|10\": {\n        \"acc\": 0.7144991037641903,\n        \"acc_stderr\": 0.004507296196227809,\n        \"acc_norm\": 0.8899621589324835,\n        \"acc_norm_stderr\": 0.0031229736320394727\n    },\n    \"harness|hendrycksTest-abstract_algebra|5\": {\n        \"acc\": 0.34,\n        \"acc_stderr\": 0.04760952285695235,\n        \"acc_norm\": 0.34,\n        \"acc_norm_stderr\": 0.04760952285695235\n    },\n    \"harness|hendrycksTest-anatomy|5\": {\n        \"acc\": 0.6666666666666666,\n        \"acc_stderr\": 0.04072314811876837,\n        \"acc_norm\": 0.6666666666666666,\n        \"acc_norm_stderr\": 0.04072314811876837\n    },\n    \"harness|hendrycksTest-astronomy|5\": {\n        \"acc\": 0.6907894736842105,\n        \"acc_stderr\": 0.037610708698674805,\n        \"acc_norm\": 0.6907894736842105,\n        \"acc_norm_stderr\": 0.037610708698674805\n    },\n    \"harness|hendrycksTest-business_ethics|5\": {\n        \"acc\": 0.64,\n        \"acc_stderr\": 0.04824181513244218,\n        \"acc_norm\": 0.64,\n        \"acc_norm_stderr\": 0.04824181513244218\n    },\n    \"harness|hendrycksTest-clinical_knowledge|5\": {\n        \"acc\": 0.7094339622641509,\n        \"acc_stderr\": 0.02794321998933714,\n        \"acc_norm\": 0.7094339622641509,\n        \"acc_norm_stderr\": 0.02794321998933714\n    },\n    \"harness|hendrycksTest-college_biology|5\": {\n        \"acc\": 0.7638888888888888,\n        \"acc_stderr\": 0.03551446610810826,\n        \"acc_norm\": 0.7638888888888888,\n        \"acc_norm_stderr\": 0.03551446610810826\n    },\n    \"harness|hendrycksTest-college_chemistry|5\": {\n        \"acc\": 0.47,\n        
\"acc_stderr\": 0.050161355804659205,\n \"acc_norm\": 0.47,\n \"acc_norm_stderr\": 0.050161355804659205\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.52,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.52,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6705202312138728,\n \"acc_stderr\": 0.03583901754736412,\n \"acc_norm\": 0.6705202312138728,\n \"acc_norm_stderr\": 0.03583901754736412\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4215686274509804,\n \"acc_stderr\": 0.04913595201274498,\n \"acc_norm\": 0.4215686274509804,\n \"acc_norm_stderr\": 0.04913595201274498\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.74,\n \"acc_stderr\": 0.04408440022768077,\n \"acc_norm\": 0.74,\n \"acc_norm_stderr\": 0.04408440022768077\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5787234042553191,\n \"acc_stderr\": 0.03227834510146267,\n \"acc_norm\": 0.5787234042553191,\n \"acc_norm_stderr\": 0.03227834510146267\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.4824561403508772,\n \"acc_stderr\": 0.04700708033551038,\n \"acc_norm\": 0.4824561403508772,\n \"acc_norm_stderr\": 0.04700708033551038\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5448275862068965,\n \"acc_stderr\": 0.04149886942192117,\n \"acc_norm\": 0.5448275862068965,\n \"acc_norm_stderr\": 0.04149886942192117\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.42328042328042326,\n \"acc_stderr\": 0.025446365634406783,\n \"acc_norm\": 0.42328042328042326,\n \"acc_norm_stderr\": 0.025446365634406783\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4603174603174603,\n \"acc_stderr\": 0.04458029125470973,\n \"acc_norm\": 0.4603174603174603,\n \"acc_norm_stderr\": 0.04458029125470973\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.04688261722621505,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.04688261722621505\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7870967741935484,\n \"acc_stderr\": 0.023287665127268545,\n \"acc_norm\": 0.7870967741935484,\n \"acc_norm_stderr\": 0.023287665127268545\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5123152709359606,\n \"acc_stderr\": 0.035169204442208966,\n \"acc_norm\": 0.5123152709359606,\n \"acc_norm_stderr\": 0.035169204442208966\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.68,\n \"acc_stderr\": 0.04688261722621505,\n \"acc_norm\": 0.68,\n \"acc_norm_stderr\": 0.04688261722621505\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7818181818181819,\n \"acc_stderr\": 0.03225078108306289,\n \"acc_norm\": 0.7818181818181819,\n \"acc_norm_stderr\": 0.03225078108306289\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.803030303030303,\n \"acc_stderr\": 0.028335609732463362,\n \"acc_norm\": 0.803030303030303,\n \"acc_norm_stderr\": 0.028335609732463362\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8860103626943006,\n \"acc_stderr\": 0.022935144053919443,\n \"acc_norm\": 0.8860103626943006,\n \"acc_norm_stderr\": 0.022935144053919443\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6641025641025641,\n \"acc_stderr\": 0.023946724741563976,\n \"acc_norm\": 0.6641025641025641,\n \"acc_norm_stderr\": 0.023946724741563976\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.34444444444444444,\n \"acc_stderr\": 0.02897264888484427,\n \"acc_norm\": 0.34444444444444444,\n \"acc_norm_stderr\": 0.02897264888484427\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6764705882352942,\n \"acc_stderr\": 0.030388353551886797,\n \"acc_norm\": 0.6764705882352942,\n \"acc_norm_stderr\": 0.030388353551886797\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.37748344370860926,\n \"acc_stderr\": 0.03958027231121569,\n \"acc_norm\": 0.37748344370860926,\n \"acc_norm_stderr\": 0.03958027231121569\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8458715596330275,\n \"acc_stderr\": 0.015480826865374303,\n \"acc_norm\": 0.8458715596330275,\n \"acc_norm_stderr\": 0.015480826865374303\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5092592592592593,\n \"acc_stderr\": 0.034093869469927006,\n \"acc_norm\": 0.5092592592592593,\n \"acc_norm_stderr\": 0.034093869469927006\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8382352941176471,\n \"acc_stderr\": 0.02584501798692692,\n \"acc_norm\": 0.8382352941176471,\n \"acc_norm_stderr\": 0.02584501798692692\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8143459915611815,\n \"acc_stderr\": 0.025310495376944856,\n \"acc_norm\": 0.8143459915611815,\n \"acc_norm_stderr\": 0.025310495376944856\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6860986547085202,\n \"acc_stderr\": 0.031146796482972465,\n \"acc_norm\": 0.6860986547085202,\n \"acc_norm_stderr\": 0.031146796482972465\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7938931297709924,\n \"acc_stderr\": 0.03547771004159463,\n \"acc_norm\": 0.7938931297709924,\n \"acc_norm_stderr\": 0.03547771004159463\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7851239669421488,\n \"acc_stderr\": 0.037494924487096966,\n \"acc_norm\": 0.7851239669421488,\n \"acc_norm_stderr\": 0.037494924487096966\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.0401910747255735,\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.0401910747255735\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7730061349693251,\n \"acc_stderr\": 0.03291099578615769,\n \"acc_norm\": 0.7730061349693251,\n \"acc_norm_stderr\": 0.03291099578615769\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.45535714285714285,\n \"acc_stderr\": 0.047268355537191,\n \"acc_norm\": 0.45535714285714285,\n \"acc_norm_stderr\": 0.047268355537191\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7766990291262136,\n \"acc_stderr\": 0.04123553189891431,\n \"acc_norm\": 0.7766990291262136,\n \"acc_norm_stderr\": 0.04123553189891431\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8846153846153846,\n \"acc_stderr\": 0.02093019318517933,\n \"acc_norm\": 0.8846153846153846,\n \"acc_norm_stderr\": 0.02093019318517933\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.74,\n \"acc_stderr\": 0.04408440022768078,\n \"acc_norm\": 0.74,\n \"acc_norm_stderr\": 0.04408440022768078\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.8263090676883781,\n \"acc_stderr\": 0.013547415658662264,\n \"acc_norm\": 0.8263090676883781,\n \"acc_norm_stderr\": 0.013547415658662264\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7369942196531792,\n \"acc_stderr\": 0.023703099525258176,\n \"acc_norm\": 0.7369942196531792,\n \"acc_norm_stderr\": 0.023703099525258176\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.42905027932960893,\n \"acc_stderr\": 0.016553287863116037,\n \"acc_norm\": 0.42905027932960893,\n \"acc_norm_stderr\": 0.016553287863116037\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7124183006535948,\n \"acc_stderr\": 0.02591780611714716,\n \"acc_norm\": 0.7124183006535948,\n \"acc_norm_stderr\": 0.02591780611714716\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7202572347266881,\n \"acc_stderr\": 0.025494259350694912,\n \"acc_norm\": 0.7202572347266881,\n \"acc_norm_stderr\": 0.025494259350694912\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7592592592592593,\n \"acc_stderr\": 0.023788583551658533,\n \"acc_norm\": 0.7592592592592593,\n \"acc_norm_stderr\": 0.023788583551658533\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.48936170212765956,\n \"acc_stderr\": 0.029820747191422473,\n \"acc_norm\": 0.48936170212765956,\n \"acc_norm_stderr\": 0.029820747191422473\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.470013037809648,\n \"acc_stderr\": 0.012747248967079064,\n \"acc_norm\": 0.470013037809648,\n \"acc_norm_stderr\": 0.012747248967079064\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6801470588235294,\n \"acc_stderr\": 0.02833295951403121,\n \"acc_norm\": 0.6801470588235294,\n \"acc_norm_stderr\": 0.02833295951403121\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6715686274509803,\n \"acc_stderr\": 0.018999707383162673,\n \"acc_norm\": 0.6715686274509803,\n \"acc_norm_stderr\": 0.018999707383162673\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6909090909090909,\n \"acc_stderr\": 0.044262946482000985,\n \"acc_norm\": 0.6909090909090909,\n \"acc_norm_stderr\": 0.044262946482000985\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7306122448979592,\n \"acc_stderr\": 0.02840125202902294,\n \"acc_norm\": 0.7306122448979592,\n \"acc_norm_stderr\": 0.02840125202902294\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.835820895522388,\n \"acc_stderr\": 0.026193923544454115,\n \"acc_norm\": 0.835820895522388,\n \"acc_norm_stderr\": 0.026193923544454115\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.86,\n \"acc_stderr\": 0.0348735088019777,\n \"acc_norm\": 0.86,\n \"acc_norm_stderr\": 0.0348735088019777\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5662650602409639,\n \"acc_stderr\": 0.03858158940685516,\n \"acc_norm\": 0.5662650602409639,\n \"acc_norm_stderr\": 0.03858158940685516\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8245614035087719,\n \"acc_stderr\": 0.029170885500727665,\n \"acc_norm\": 0.8245614035087719,\n \"acc_norm_stderr\": 0.029170885500727665\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5703794369645043,\n \"mc1_stderr\": 0.01732923458040909,\n \"mc2\": 0.7177387118634652,\n \"mc2_stderr\": 0.014774281827372924\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8492501973164956,\n \"acc_stderr\": 0.010056094631479674\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.7012888551933283,\n \"acc_stderr\": 
0.012607137125693633\n }\n}\n```", "repo_url": "https://huggingface.co/jefferylovely/SuperThetaMaven", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_02T04_35_28.673518", "path": ["**/details_harness|arc:challenge|25_2024-02-02T04-35-28.673518.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-02T04-35-28.673518.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_02T04_35_28.673518", "path": ["**/details_harness|gsm8k|5_2024-02-02T04-35-28.673518.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-02T04-35-28.673518.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_02T04_35_28.673518", "path": ["**/details_harness|hellaswag|10_2024-02-02T04-35-28.673518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-02T04-35-28.673518.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_02T04_35_28.673518", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T04-35-28.673518.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T04-35-28.673518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T04-35-28.673518.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T04-35-28.673518.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T04-35-28.673518.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T04-35-28.673518.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_02T04_35_28.673518", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T04-35-28.673518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T04-35-28.673518.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_02T04_35_28.673518", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T04-35-28.673518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T04-35-28.673518.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_02T04_35_28.673518", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T04-35-28.673518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T04-35-28.673518.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_02T04_35_28.673518", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T04-35-28.673518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T04-35-28.673518.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_02T04_35_28.673518", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T04-35-28.673518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T04-35-28.673518.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_02T04_35_28.673518", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T04-35-28.673518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T04-35-28.673518.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_02T04_35_28.673518", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T04-35-28.673518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T04-35-28.673518.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_02_02T04_35_28.673518", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T04-35-28.673518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T04-35-28.673518.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_02T04_35_28.673518", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T04-35-28.673518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T04-35-28.673518.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_02T04_35_28.673518", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T04-35-28.673518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T04-35-28.673518.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_02T04_35_28.673518", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T04-35-28.673518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T04-35-28.673518.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_02T04_35_28.673518", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T04-35-28.673518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T04-35-28.673518.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_02T04_35_28.673518", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T04-35-28.673518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T04-35-28.673518.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_02T04_35_28.673518", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T04-35-28.673518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T04-35-28.673518.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_02T04_35_28.673518", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T04-35-28.673518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T04-35-28.673518.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_02T04_35_28.673518", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T04-35-28.673518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T04-35-28.673518.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_02T04_35_28.673518", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T04-35-28.673518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T04-35-28.673518.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_02T04_35_28.673518", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T04-35-28.673518.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T04-35-28.673518.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_02T04_35_28.673518", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T04-35-28.673518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T04-35-28.673518.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_02T04_35_28.673518", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T04-35-28.673518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T04-35-28.673518.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_02T04_35_28.673518", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T04-35-28.673518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T04-35-28.673518.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_02T04_35_28.673518", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T04-35-28.673518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T04-35-28.673518.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_02T04_35_28.673518", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T04-35-28.673518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T04-35-28.673518.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_02T04_35_28.673518", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T04-35-28.673518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T04-35-28.673518.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_02T04_35_28.673518", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T04-35-28.673518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T04-35-28.673518.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_02T04_35_28.673518", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T04-35-28.673518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T04-35-28.673518.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_02T04_35_28.673518", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T04-35-28.673518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T04-35-28.673518.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_02T04_35_28.673518", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T04-35-28.673518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T04-35-28.673518.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_02T04_35_28.673518", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T04-35-28.673518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T04-35-28.673518.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_02T04_35_28.673518", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T04-35-28.673518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T04-35-28.673518.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_02T04_35_28.673518", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T04-35-28.673518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T04-35-28.673518.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_02T04_35_28.673518", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T04-35-28.673518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T04-35-28.673518.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_02T04_35_28.673518", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T04-35-28.673518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T04-35-28.673518.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_02T04_35_28.673518", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T04-35-28.673518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T04-35-28.673518.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_02T04_35_28.673518", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T04-35-28.673518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T04-35-28.673518.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_02T04_35_28.673518", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T04-35-28.673518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T04-35-28.673518.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_02T04_35_28.673518", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T04-35-28.673518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T04-35-28.673518.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_02_02T04_35_28.673518", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T04-35-28.673518.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T04-35-28.673518.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_02T04_35_28.673518", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T04-35-28.673518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T04-35-28.673518.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_02T04_35_28.673518", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T04-35-28.673518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T04-35-28.673518.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_02T04_35_28.673518", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T04-35-28.673518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T04-35-28.673518.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_02T04_35_28.673518", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T04-35-28.673518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T04-35-28.673518.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_02T04_35_28.673518", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T04-35-28.673518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T04-35-28.673518.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_02T04_35_28.673518", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T04-35-28.673518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T04-35-28.673518.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_02T04_35_28.673518", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T04-35-28.673518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T04-35-28.673518.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_02T04_35_28.673518", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T04-35-28.673518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T04-35-28.673518.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_02T04_35_28.673518", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T04-35-28.673518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T04-35-28.673518.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_02T04_35_28.673518", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T04-35-28.673518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T04-35-28.673518.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_02T04_35_28.673518", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T04-35-28.673518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T04-35-28.673518.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_02T04_35_28.673518", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T04-35-28.673518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T04-35-28.673518.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_02T04_35_28.673518", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T04-35-28.673518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T04-35-28.673518.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_02T04_35_28.673518", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T04-35-28.673518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T04-35-28.673518.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_02T04_35_28.673518", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T04-35-28.673518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T04-35-28.673518.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_02T04_35_28.673518", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T04-35-28.673518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T04-35-28.673518.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_02T04_35_28.673518", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T04-35-28.673518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T04-35-28.673518.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_02T04_35_28.673518", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T04-35-28.673518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T04-35-28.673518.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_02T04_35_28.673518", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T04-35-28.673518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T04-35-28.673518.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_02T04_35_28.673518", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T04-35-28.673518.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T04-35-28.673518.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_02T04_35_28.673518", "path": ["**/details_harness|winogrande|5_2024-02-02T04-35-28.673518.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-02-02T04-35-28.673518.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_02_02T04_35_28.673518", "path": ["results_2024-02-02T04-35-28.673518.parquet"]}, {"split": "latest", "path": ["results_2024-02-02T04-35-28.673518.parquet"]}]}]}
2024-02-02T04:38:12+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of jefferylovely/SuperThetaMaven Dataset automatically created during the evaluation run of model jefferylovely/SuperThetaMaven on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following (see the sketch just after this card): ## Latest results These are the latest results from run 2024-02-02T04:35:28.673518 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
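This processed copy omits the fenced snippet that usually follows "do the following:" in these auto-generated cards. A minimal reconstruction, assuming the repo id follows the usual `open-llm-leaderboard/details_<org>__<model>` naming (the id itself is not shown in this copy):

```python
from datasets import load_dataset

# Repo id inferred from the model name via the details_<org>__<model>
# convention; treat it as an assumption, since this copy does not state it.
data = load_dataset("open-llm-leaderboard/details_jefferylovely__SuperThetaMaven",
                    "harness_winogrande_5",
                    split="train")
```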
[ "# Dataset Card for Evaluation run of jefferylovely/SuperThetaMaven\n\n\n\nDataset automatically created during the evaluation run of model jefferylovely/SuperThetaMaven on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T04:35:28.673518(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of jefferylovely/SuperThetaMaven\n\n\n\nDataset automatically created during the evaluation run of model jefferylovely/SuperThetaMaven on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T04:35:28.673518(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
9ef058ca5b28c875e1e5b4d145061a44105dd7f1
# Dataset Card for Evaluation run of Lvxy1117/amber_fine_tune_ori <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [Lvxy1117/amber_fine_tune_ori](https://huggingface.co/Lvxy1117/amber_fine_tune_ori) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_Lvxy1117__amber_fine_tune_ori", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-02-02T04:45:56.417664](https://huggingface.co/datasets/open-llm-leaderboard/details_Lvxy1117__amber_fine_tune_ori/blob/main/results_2024-02-02T04-45-56.417664.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.2703950535933958, "acc_stderr": 0.03111275987496194, "acc_norm": 0.2718823298213839, "acc_norm_stderr": 0.03187597464930614, "mc1": 0.20930232558139536, "mc1_stderr": 0.014241219434785827, "mc2": 0.3493607326810005, "mc2_stderr": 0.015039024893260722 }, "harness|arc:challenge|25": { "acc": 0.42406143344709896, "acc_stderr": 0.0144418896274644, "acc_norm": 0.4445392491467577, "acc_norm_stderr": 0.014521226405627079 }, "harness|hellaswag|10": { "acc": 0.5802628958374826, "acc_stderr": 0.004925072159723838, "acc_norm": 0.7510456084445329, "acc_norm_stderr": 0.004315236154543956 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.28, "acc_stderr": 0.04512608598542128, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542128 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.22962962962962963, "acc_stderr": 0.03633384414073465, "acc_norm": 0.22962962962962963, "acc_norm_stderr": 0.03633384414073465 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.16447368421052633, "acc_stderr": 0.030167533468632726, "acc_norm": 0.16447368421052633, "acc_norm_stderr": 0.030167533468632726 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.27547169811320754, "acc_stderr": 0.02749566368372406, "acc_norm": 0.27547169811320754, "acc_norm_stderr": 0.02749566368372406 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.24305555555555555, "acc_stderr": 0.03586879280080339, "acc_norm": 0.24305555555555555, "acc_norm_stderr": 0.03586879280080339 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.19, "acc_stderr": 0.03942772444036623, "acc_norm": 0.19, "acc_norm_stderr": 0.03942772444036623 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.22, "acc_stderr": 0.04163331998932269, "acc_norm": 0.22, "acc_norm_stderr":
0.04163331998932269 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.22, "acc_stderr": 0.04163331998932268, "acc_norm": 0.22, "acc_norm_stderr": 0.04163331998932268 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.2254335260115607, "acc_stderr": 0.03186209851641144, "acc_norm": 0.2254335260115607, "acc_norm_stderr": 0.03186209851641144 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.19607843137254902, "acc_stderr": 0.03950581861179961, "acc_norm": 0.19607843137254902, "acc_norm_stderr": 0.03950581861179961 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.28, "acc_stderr": 0.04512608598542127, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542127 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.3276595744680851, "acc_stderr": 0.030683020843231008, "acc_norm": 0.3276595744680851, "acc_norm_stderr": 0.030683020843231008 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.24561403508771928, "acc_stderr": 0.0404933929774814, "acc_norm": 0.24561403508771928, "acc_norm_stderr": 0.0404933929774814 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.2620689655172414, "acc_stderr": 0.036646663372252565, "acc_norm": 0.2620689655172414, "acc_norm_stderr": 0.036646663372252565 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.24867724867724866, "acc_stderr": 0.02226181769240016, "acc_norm": 0.24867724867724866, "acc_norm_stderr": 0.02226181769240016 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.2619047619047619, "acc_stderr": 0.03932537680392871, "acc_norm": 0.2619047619047619, "acc_norm_stderr": 0.03932537680392871 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.2032258064516129, "acc_stderr": 0.022891687984554966, "acc_norm": 0.2032258064516129, "acc_norm_stderr": 0.022891687984554966 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.26108374384236455, "acc_stderr": 0.030903796952114485, "acc_norm": 0.26108374384236455, "acc_norm_stderr": 0.030903796952114485 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.26, "acc_stderr": 0.0440844002276808, "acc_norm": 0.26, "acc_norm_stderr": 0.0440844002276808 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.24242424242424243, "acc_stderr": 0.03346409881055953, "acc_norm": 0.24242424242424243, "acc_norm_stderr": 0.03346409881055953 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.25757575757575757, "acc_stderr": 0.03115626951964683, "acc_norm": 0.25757575757575757, "acc_norm_stderr": 0.03115626951964683 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.21243523316062177, "acc_stderr": 0.02951928261681723, "acc_norm": 0.21243523316062177, "acc_norm_stderr": 0.02951928261681723 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.2153846153846154, "acc_stderr": 0.020843034557462878, "acc_norm": 0.2153846153846154, "acc_norm_stderr": 0.020843034557462878 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.26666666666666666, "acc_stderr": 0.026962424325073835, "acc_norm": 0.26666666666666666, "acc_norm_stderr": 0.026962424325073835 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.2184873949579832, "acc_stderr": 0.02684151432295893, "acc_norm": 0.2184873949579832, "acc_norm_stderr": 0.02684151432295893 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 
0.2185430463576159, "acc_stderr": 0.03374235550425694, "acc_norm": 0.2185430463576159, "acc_norm_stderr": 0.03374235550425694 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.24036697247706423, "acc_stderr": 0.01832060732096407, "acc_norm": 0.24036697247706423, "acc_norm_stderr": 0.01832060732096407 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.1574074074074074, "acc_stderr": 0.024837173518242387, "acc_norm": 0.1574074074074074, "acc_norm_stderr": 0.024837173518242387 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.23039215686274508, "acc_stderr": 0.029554292605695063, "acc_norm": 0.23039215686274508, "acc_norm_stderr": 0.029554292605695063 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.26582278481012656, "acc_stderr": 0.028756799629658346, "acc_norm": 0.26582278481012656, "acc_norm_stderr": 0.028756799629658346 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.39461883408071746, "acc_stderr": 0.03280400504755291, "acc_norm": 0.39461883408071746, "acc_norm_stderr": 0.03280400504755291 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.3053435114503817, "acc_stderr": 0.04039314978724561, "acc_norm": 0.3053435114503817, "acc_norm_stderr": 0.04039314978724561 }, "harness|hendrycksTest-international_law|5": { "acc": 0.23140495867768596, "acc_stderr": 0.03849856098794089, "acc_norm": 0.23140495867768596, "acc_norm_stderr": 0.03849856098794089 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.2962962962962963, "acc_stderr": 0.044143436668549335, "acc_norm": 0.2962962962962963, "acc_norm_stderr": 0.044143436668549335 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.22699386503067484, "acc_stderr": 0.032910995786157686, "acc_norm": 0.22699386503067484, "acc_norm_stderr": 0.032910995786157686 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.3392857142857143, "acc_stderr": 0.0449394906861354, "acc_norm": 0.3392857142857143, "acc_norm_stderr": 0.0449394906861354 }, "harness|hendrycksTest-management|5": { "acc": 0.24271844660194175, "acc_stderr": 0.04245022486384495, "acc_norm": 0.24271844660194175, "acc_norm_stderr": 0.04245022486384495 }, "harness|hendrycksTest-marketing|5": { "acc": 0.32905982905982906, "acc_stderr": 0.030782321577688156, "acc_norm": 0.32905982905982906, "acc_norm_stderr": 0.030782321577688156 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.3128991060025543, "acc_stderr": 0.01658093594030406, "acc_norm": 0.3128991060025543, "acc_norm_stderr": 0.01658093594030406 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.27167630057803466, "acc_stderr": 0.02394851290546835, "acc_norm": 0.27167630057803466, "acc_norm_stderr": 0.02394851290546835 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.2435754189944134, "acc_stderr": 0.014355911964767864, "acc_norm": 0.2435754189944134, "acc_norm_stderr": 0.014355911964767864 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.2777777777777778, "acc_stderr": 0.02564686309713791, "acc_norm": 0.2777777777777778, "acc_norm_stderr": 0.02564686309713791 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.24758842443729903, "acc_stderr": 0.024513879973621967, "acc_norm": 0.24758842443729903, "acc_norm_stderr": 0.024513879973621967 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.2932098765432099, "acc_stderr": 0.025329888171900922, "acc_norm": 0.2932098765432099, "acc_norm_stderr": 
0.025329888171900922 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.24822695035460993, "acc_stderr": 0.025770015644290382, "acc_norm": 0.24822695035460993, "acc_norm_stderr": 0.025770015644290382 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.27640156453715775, "acc_stderr": 0.01142215319455358, "acc_norm": 0.27640156453715775, "acc_norm_stderr": 0.01142215319455358 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.1948529411764706, "acc_stderr": 0.024060599423487414, "acc_norm": 0.1948529411764706, "acc_norm_stderr": 0.024060599423487414 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.2696078431372549, "acc_stderr": 0.017952449196987862, "acc_norm": 0.2696078431372549, "acc_norm_stderr": 0.017952449196987862 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.2727272727272727, "acc_stderr": 0.04265792110940588, "acc_norm": 0.2727272727272727, "acc_norm_stderr": 0.04265792110940588 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.19183673469387755, "acc_stderr": 0.02520696315422541, "acc_norm": 0.19183673469387755, "acc_norm_stderr": 0.02520696315422541 }, "harness|hendrycksTest-sociology|5": { "acc": 0.23880597014925373, "acc_stderr": 0.030147775935409217, "acc_norm": 0.23880597014925373, "acc_norm_stderr": 0.030147775935409217 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|hendrycksTest-virology|5": { "acc": 0.3313253012048193, "acc_stderr": 0.036643147772880864, "acc_norm": 0.3313253012048193, "acc_norm_stderr": 0.036643147772880864 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.4093567251461988, "acc_stderr": 0.037712831076265434, "acc_norm": 0.4093567251461988, "acc_norm_stderr": 0.037712831076265434 }, "harness|truthfulqa:mc|0": { "mc1": 0.20930232558139536, "mc1_stderr": 0.014241219434785827, "mc2": 0.3493607326810005, "mc2_stderr": 0.015039024893260722 }, "harness|winogrande|5": { "acc": 0.6314127861089187, "acc_stderr": 0.013558447570099323 }, "harness|gsm8k|5": { "acc": 0.01288855193328279, "acc_stderr": 0.0031069012664996674 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
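Because every configuration of this dataset exposes a "latest" split and the aggregated metrics live in the "results" configuration, the summary numbers above can be pulled without touching any per-task config. A minimal sketch, reusing the repo id from the card's own snippet:

```python
from datasets import load_dataset

# "results" stores the aggregated metrics of the run; "latest" always points
# at the most recent timestamped split.
results = load_dataset("open-llm-leaderboard/details_Lvxy1117__amber_fine_tune_ori",
                       "results",
                       split="latest")
print(results[0])  # aggregated scores for the most recent evaluation run
```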
open-llm-leaderboard/details_Lvxy1117__amber_fine_tune_ori
[ "region:us" ]
2024-02-02T04:47:51+00:00
{"pretty_name": "Evaluation run of Lvxy1117/amber_fine_tune_ori", "dataset_summary": "Dataset automatically created during the evaluation run of model [Lvxy1117/amber_fine_tune_ori](https://huggingface.co/Lvxy1117/amber_fine_tune_ori) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Lvxy1117__amber_fine_tune_ori\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-02T04:45:56.417664](https://huggingface.co/datasets/open-llm-leaderboard/details_Lvxy1117__amber_fine_tune_ori/blob/main/results_2024-02-02T04-45-56.417664.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.2703950535933958,\n \"acc_stderr\": 0.03111275987496194,\n \"acc_norm\": 0.2718823298213839,\n \"acc_norm_stderr\": 0.03187597464930614,\n \"mc1\": 0.20930232558139536,\n \"mc1_stderr\": 0.014241219434785827,\n \"mc2\": 0.3493607326810005,\n \"mc2_stderr\": 0.015039024893260722\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.42406143344709896,\n \"acc_stderr\": 0.0144418896274644,\n \"acc_norm\": 0.4445392491467577,\n \"acc_norm_stderr\": 0.014521226405627079\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.5802628958374826,\n \"acc_stderr\": 0.004925072159723838,\n \"acc_norm\": 0.7510456084445329,\n \"acc_norm_stderr\": 0.004315236154543956\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542128,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542128\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.22962962962962963,\n \"acc_stderr\": 0.03633384414073465,\n \"acc_norm\": 0.22962962962962963,\n \"acc_norm_stderr\": 0.03633384414073465\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.16447368421052633,\n \"acc_stderr\": 0.030167533468632726,\n \"acc_norm\": 0.16447368421052633,\n \"acc_norm_stderr\": 0.030167533468632726\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.047258156262526045,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.047258156262526045\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.27547169811320754,\n \"acc_stderr\": 0.02749566368372406,\n \"acc_norm\": 0.27547169811320754,\n \"acc_norm_stderr\": 0.02749566368372406\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.24305555555555555,\n \"acc_stderr\": 0.03586879280080339,\n \"acc_norm\": 0.24305555555555555,\n \"acc_norm_stderr\": 0.03586879280080339\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.19,\n 
\"acc_stderr\": 0.03942772444036623,\n \"acc_norm\": 0.19,\n \"acc_norm_stderr\": 0.03942772444036623\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.22,\n \"acc_stderr\": 0.04163331998932269,\n \"acc_norm\": 0.22,\n \"acc_norm_stderr\": 0.04163331998932269\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.22,\n \"acc_stderr\": 0.04163331998932268,\n \"acc_norm\": 0.22,\n \"acc_norm_stderr\": 0.04163331998932268\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.2254335260115607,\n \"acc_stderr\": 0.03186209851641144,\n \"acc_norm\": 0.2254335260115607,\n \"acc_norm_stderr\": 0.03186209851641144\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.19607843137254902,\n \"acc_stderr\": 0.03950581861179961,\n \"acc_norm\": 0.19607843137254902,\n \"acc_norm_stderr\": 0.03950581861179961\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542127,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542127\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.3276595744680851,\n \"acc_stderr\": 0.030683020843231008,\n \"acc_norm\": 0.3276595744680851,\n \"acc_norm_stderr\": 0.030683020843231008\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.24561403508771928,\n \"acc_stderr\": 0.0404933929774814,\n \"acc_norm\": 0.24561403508771928,\n \"acc_norm_stderr\": 0.0404933929774814\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.2620689655172414,\n \"acc_stderr\": 0.036646663372252565,\n \"acc_norm\": 0.2620689655172414,\n \"acc_norm_stderr\": 0.036646663372252565\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.24867724867724866,\n \"acc_stderr\": 0.02226181769240016,\n \"acc_norm\": 0.24867724867724866,\n \"acc_norm_stderr\": 0.02226181769240016\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.2619047619047619,\n \"acc_stderr\": 0.03932537680392871,\n \"acc_norm\": 0.2619047619047619,\n \"acc_norm_stderr\": 0.03932537680392871\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.046882617226215034,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.046882617226215034\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.2032258064516129,\n \"acc_stderr\": 0.022891687984554966,\n \"acc_norm\": 0.2032258064516129,\n \"acc_norm_stderr\": 0.022891687984554966\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.26108374384236455,\n \"acc_stderr\": 0.030903796952114485,\n \"acc_norm\": 0.26108374384236455,\n \"acc_norm_stderr\": 0.030903796952114485\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.26,\n \"acc_stderr\": 0.0440844002276808,\n \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.0440844002276808\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.24242424242424243,\n \"acc_stderr\": 0.03346409881055953,\n \"acc_norm\": 0.24242424242424243,\n \"acc_norm_stderr\": 0.03346409881055953\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.25757575757575757,\n \"acc_stderr\": 0.03115626951964683,\n \"acc_norm\": 0.25757575757575757,\n \"acc_norm_stderr\": 0.03115626951964683\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.21243523316062177,\n \"acc_stderr\": 0.02951928261681723,\n \"acc_norm\": 0.21243523316062177,\n \"acc_norm_stderr\": 0.02951928261681723\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.2153846153846154,\n \"acc_stderr\": 0.020843034557462878,\n \"acc_norm\": 0.2153846153846154,\n \"acc_norm_stderr\": 0.020843034557462878\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.26666666666666666,\n \"acc_stderr\": 0.026962424325073835,\n \"acc_norm\": 0.26666666666666666,\n \"acc_norm_stderr\": 0.026962424325073835\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.2184873949579832,\n \"acc_stderr\": 0.02684151432295893,\n \"acc_norm\": 0.2184873949579832,\n \"acc_norm_stderr\": 0.02684151432295893\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.2185430463576159,\n \"acc_stderr\": 0.03374235550425694,\n \"acc_norm\": 0.2185430463576159,\n \"acc_norm_stderr\": 0.03374235550425694\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.24036697247706423,\n \"acc_stderr\": 0.01832060732096407,\n \"acc_norm\": 0.24036697247706423,\n \"acc_norm_stderr\": 0.01832060732096407\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.1574074074074074,\n \"acc_stderr\": 0.024837173518242387,\n \"acc_norm\": 0.1574074074074074,\n \"acc_norm_stderr\": 0.024837173518242387\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.23039215686274508,\n \"acc_stderr\": 0.029554292605695063,\n \"acc_norm\": 0.23039215686274508,\n \"acc_norm_stderr\": 0.029554292605695063\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.26582278481012656,\n \"acc_stderr\": 0.028756799629658346,\n \"acc_norm\": 0.26582278481012656,\n \"acc_norm_stderr\": 0.028756799629658346\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.39461883408071746,\n \"acc_stderr\": 0.03280400504755291,\n \"acc_norm\": 0.39461883408071746,\n \"acc_norm_stderr\": 0.03280400504755291\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.3053435114503817,\n \"acc_stderr\": 0.04039314978724561,\n \"acc_norm\": 0.3053435114503817,\n \"acc_norm_stderr\": 0.04039314978724561\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.23140495867768596,\n \"acc_stderr\": 0.03849856098794089,\n \"acc_norm\": 0.23140495867768596,\n \"acc_norm_stderr\": 0.03849856098794089\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.2962962962962963,\n \"acc_stderr\": 0.044143436668549335,\n \"acc_norm\": 0.2962962962962963,\n \"acc_norm_stderr\": 0.044143436668549335\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.22699386503067484,\n \"acc_stderr\": 0.032910995786157686,\n \"acc_norm\": 0.22699386503067484,\n \"acc_norm_stderr\": 0.032910995786157686\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.3392857142857143,\n \"acc_stderr\": 0.0449394906861354,\n \"acc_norm\": 0.3392857142857143,\n \"acc_norm_stderr\": 0.0449394906861354\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.24271844660194175,\n \"acc_stderr\": 0.04245022486384495,\n \"acc_norm\": 0.24271844660194175,\n \"acc_norm_stderr\": 0.04245022486384495\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.32905982905982906,\n \"acc_stderr\": 0.030782321577688156,\n \"acc_norm\": 0.32905982905982906,\n \"acc_norm_stderr\": 0.030782321577688156\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-miscellaneous|5\": 
{\n \"acc\": 0.3128991060025543,\n \"acc_stderr\": 0.01658093594030406,\n \"acc_norm\": 0.3128991060025543,\n \"acc_norm_stderr\": 0.01658093594030406\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.27167630057803466,\n \"acc_stderr\": 0.02394851290546835,\n \"acc_norm\": 0.27167630057803466,\n \"acc_norm_stderr\": 0.02394851290546835\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.2435754189944134,\n \"acc_stderr\": 0.014355911964767864,\n \"acc_norm\": 0.2435754189944134,\n \"acc_norm_stderr\": 0.014355911964767864\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.2777777777777778,\n \"acc_stderr\": 0.02564686309713791,\n \"acc_norm\": 0.2777777777777778,\n \"acc_norm_stderr\": 0.02564686309713791\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.24758842443729903,\n \"acc_stderr\": 0.024513879973621967,\n \"acc_norm\": 0.24758842443729903,\n \"acc_norm_stderr\": 0.024513879973621967\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.2932098765432099,\n \"acc_stderr\": 0.025329888171900922,\n \"acc_norm\": 0.2932098765432099,\n \"acc_norm_stderr\": 0.025329888171900922\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.24822695035460993,\n \"acc_stderr\": 0.025770015644290382,\n \"acc_norm\": 0.24822695035460993,\n \"acc_norm_stderr\": 0.025770015644290382\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.27640156453715775,\n \"acc_stderr\": 0.01142215319455358,\n \"acc_norm\": 0.27640156453715775,\n \"acc_norm_stderr\": 0.01142215319455358\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.1948529411764706,\n \"acc_stderr\": 0.024060599423487414,\n \"acc_norm\": 0.1948529411764706,\n \"acc_norm_stderr\": 0.024060599423487414\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.2696078431372549,\n \"acc_stderr\": 0.017952449196987862,\n \"acc_norm\": 0.2696078431372549,\n \"acc_norm_stderr\": 0.017952449196987862\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.2727272727272727,\n \"acc_stderr\": 0.04265792110940588,\n \"acc_norm\": 0.2727272727272727,\n \"acc_norm_stderr\": 0.04265792110940588\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.19183673469387755,\n \"acc_stderr\": 0.02520696315422541,\n \"acc_norm\": 0.19183673469387755,\n \"acc_norm_stderr\": 0.02520696315422541\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.23880597014925373,\n \"acc_stderr\": 0.030147775935409217,\n \"acc_norm\": 0.23880597014925373,\n \"acc_norm_stderr\": 0.030147775935409217\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.046882617226215034,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.046882617226215034\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.3313253012048193,\n \"acc_stderr\": 0.036643147772880864,\n \"acc_norm\": 0.3313253012048193,\n \"acc_norm_stderr\": 0.036643147772880864\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.4093567251461988,\n \"acc_stderr\": 0.037712831076265434,\n \"acc_norm\": 0.4093567251461988,\n \"acc_norm_stderr\": 0.037712831076265434\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.20930232558139536,\n \"mc1_stderr\": 0.014241219434785827,\n \"mc2\": 0.3493607326810005,\n \"mc2_stderr\": 0.015039024893260722\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.6314127861089187,\n \"acc_stderr\": 0.013558447570099323\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.01288855193328279,\n 
\"acc_stderr\": 0.0031069012664996674\n }\n}\n```", "repo_url": "https://huggingface.co/Lvxy1117/amber_fine_tune_ori", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_02T04_45_56.417664", "path": ["**/details_harness|arc:challenge|25_2024-02-02T04-45-56.417664.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-02T04-45-56.417664.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_02T04_45_56.417664", "path": ["**/details_harness|gsm8k|5_2024-02-02T04-45-56.417664.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-02T04-45-56.417664.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_02T04_45_56.417664", "path": ["**/details_harness|hellaswag|10_2024-02-02T04-45-56.417664.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-02T04-45-56.417664.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_02T04_45_56.417664", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T04-45-56.417664.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T04-45-56.417664.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T04-45-56.417664.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T04-45-56.417664.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T04-45-56.417664.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T04-45-56.417664.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_02T04_45_56.417664", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T04-45-56.417664.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T04-45-56.417664.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_02T04_45_56.417664", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T04-45-56.417664.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T04-45-56.417664.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_02T04_45_56.417664", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T04-45-56.417664.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T04-45-56.417664.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_02T04_45_56.417664", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T04-45-56.417664.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T04-45-56.417664.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_02T04_45_56.417664", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T04-45-56.417664.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T04-45-56.417664.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_02T04_45_56.417664", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T04-45-56.417664.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T04-45-56.417664.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_02T04_45_56.417664", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T04-45-56.417664.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T04-45-56.417664.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_02_02T04_45_56.417664", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T04-45-56.417664.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T04-45-56.417664.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_02T04_45_56.417664", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T04-45-56.417664.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T04-45-56.417664.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_02T04_45_56.417664", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T04-45-56.417664.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T04-45-56.417664.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_02T04_45_56.417664", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T04-45-56.417664.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T04-45-56.417664.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_02T04_45_56.417664", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T04-45-56.417664.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T04-45-56.417664.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_02T04_45_56.417664", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T04-45-56.417664.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T04-45-56.417664.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_02T04_45_56.417664", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T04-45-56.417664.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T04-45-56.417664.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_02T04_45_56.417664", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T04-45-56.417664.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T04-45-56.417664.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_02T04_45_56.417664", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T04-45-56.417664.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T04-45-56.417664.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_02T04_45_56.417664", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T04-45-56.417664.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T04-45-56.417664.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_02T04_45_56.417664", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T04-45-56.417664.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T04-45-56.417664.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_02T04_45_56.417664", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T04-45-56.417664.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T04-45-56.417664.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_02T04_45_56.417664", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T04-45-56.417664.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T04-45-56.417664.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_02T04_45_56.417664", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T04-45-56.417664.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T04-45-56.417664.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_02T04_45_56.417664", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T04-45-56.417664.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T04-45-56.417664.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_02T04_45_56.417664", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T04-45-56.417664.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T04-45-56.417664.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_02T04_45_56.417664", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T04-45-56.417664.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T04-45-56.417664.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_02T04_45_56.417664", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T04-45-56.417664.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T04-45-56.417664.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_02T04_45_56.417664", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T04-45-56.417664.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T04-45-56.417664.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_02T04_45_56.417664", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T04-45-56.417664.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T04-45-56.417664.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_02T04_45_56.417664", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T04-45-56.417664.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T04-45-56.417664.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_02T04_45_56.417664", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T04-45-56.417664.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T04-45-56.417664.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_02T04_45_56.417664", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T04-45-56.417664.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T04-45-56.417664.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_02T04_45_56.417664", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T04-45-56.417664.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T04-45-56.417664.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_02T04_45_56.417664", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T04-45-56.417664.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T04-45-56.417664.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_02T04_45_56.417664", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T04-45-56.417664.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T04-45-56.417664.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_02T04_45_56.417664", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T04-45-56.417664.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T04-45-56.417664.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_02T04_45_56.417664", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T04-45-56.417664.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T04-45-56.417664.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_02T04_45_56.417664", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T04-45-56.417664.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T04-45-56.417664.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_02T04_45_56.417664", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T04-45-56.417664.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T04-45-56.417664.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_02_02T04_45_56.417664", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T04-45-56.417664.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T04-45-56.417664.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_02T04_45_56.417664", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T04-45-56.417664.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T04-45-56.417664.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_02T04_45_56.417664", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T04-45-56.417664.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T04-45-56.417664.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_02T04_45_56.417664", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T04-45-56.417664.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T04-45-56.417664.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_02T04_45_56.417664", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T04-45-56.417664.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T04-45-56.417664.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_02T04_45_56.417664", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T04-45-56.417664.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T04-45-56.417664.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_02T04_45_56.417664", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T04-45-56.417664.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T04-45-56.417664.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_02T04_45_56.417664", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T04-45-56.417664.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T04-45-56.417664.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_02T04_45_56.417664", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T04-45-56.417664.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T04-45-56.417664.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_02T04_45_56.417664", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T04-45-56.417664.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T04-45-56.417664.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_02T04_45_56.417664", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T04-45-56.417664.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T04-45-56.417664.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_02T04_45_56.417664", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T04-45-56.417664.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T04-45-56.417664.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_02T04_45_56.417664", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T04-45-56.417664.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T04-45-56.417664.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_02T04_45_56.417664", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T04-45-56.417664.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T04-45-56.417664.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_02T04_45_56.417664", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T04-45-56.417664.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T04-45-56.417664.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_02T04_45_56.417664", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T04-45-56.417664.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T04-45-56.417664.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_02T04_45_56.417664", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T04-45-56.417664.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T04-45-56.417664.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_02T04_45_56.417664", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T04-45-56.417664.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T04-45-56.417664.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_02T04_45_56.417664", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T04-45-56.417664.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T04-45-56.417664.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_02T04_45_56.417664", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T04-45-56.417664.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T04-45-56.417664.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_02T04_45_56.417664", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T04-45-56.417664.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T04-45-56.417664.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_02T04_45_56.417664", "path": ["**/details_harness|winogrande|5_2024-02-02T04-45-56.417664.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-02-02T04-45-56.417664.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_02_02T04_45_56.417664", "path": ["results_2024-02-02T04-45-56.417664.parquet"]}, {"split": "latest", "path": ["results_2024-02-02T04-45-56.417664.parquet"]}]}]}
2024-02-02T04:48:14+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of Lvxy1117/amber_fine_tune_ori Dataset automatically created during the evaluation run of model Lvxy1117/amber_fine_tune_ori on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-02-02T04:45:56.417664 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of Lvxy1117/amber_fine_tune_ori\n\n\n\nDataset automatically created during the evaluation run of model Lvxy1117/amber_fine_tune_ori on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T04:45:56.417664(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of Lvxy1117/amber_fine_tune_ori\n\n\n\nDataset automatically created during the evaluation run of model Lvxy1117/amber_fine_tune_ori on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T04:45:56.417664(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
494eaa1a66138acbf8810c22bfbb3dfef94537c6
# Dataset Card for Evaluation run of DrNicefellow/ChatAllInOne-Yi-34B-200K-V1 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [DrNicefellow/ChatAllInOne-Yi-34B-200K-V1](https://huggingface.co/DrNicefellow/ChatAllInOne-Yi-34B-200K-V1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_DrNicefellow__ChatAllInOne-Yi-34B-200K-V1", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-02-02T07:46:14.329996](https://huggingface.co/datasets/open-llm-leaderboard/details_DrNicefellow__ChatAllInOne-Yi-34B-200K-V1/blob/main/results_2024-02-02T07-46-14.329996.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.7350413866691472, "acc_stderr": 0.029053323018774252, "acc_norm": 0.7399853641294432, "acc_norm_stderr": 0.029602382625034382, "mc1": 0.4112607099143207, "mc1_stderr": 0.017225627083660867, "mc2": 0.5682495749148809, "mc2_stderr": 0.014775176850726521 }, "harness|arc:challenge|25": { "acc": 0.621160409556314, "acc_stderr": 0.014175915490000326, "acc_norm": 0.659556313993174, "acc_norm_stderr": 0.01384746051889298 }, "harness|hellaswag|10": { "acc": 0.6427006572395937, "acc_stderr": 0.004782246931195002, "acc_norm": 0.8458474407488548, "acc_norm_stderr": 0.0036035695286784127 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.46, "acc_stderr": 0.05009082659620332, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620332 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.7185185185185186, "acc_stderr": 0.038850042458002526, "acc_norm": 0.7185185185185186, "acc_norm_stderr": 0.038850042458002526 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.875, "acc_stderr": 0.026913523521537846, "acc_norm": 0.875, "acc_norm_stderr": 0.026913523521537846 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.76, "acc_stderr": 0.04292346959909283, "acc_norm": 0.76, "acc_norm_stderr": 0.04292346959909283 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.8113207547169812, "acc_stderr": 0.024079995130062253, "acc_norm": 0.8113207547169812, "acc_norm_stderr": 0.024079995130062253 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.8819444444444444, "acc_stderr": 0.026983346503309375, "acc_norm": 0.8819444444444444, "acc_norm_stderr": 0.026983346503309375 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.63, "acc_stderr": 0.048523658709391, "acc_norm": 
0.63, "acc_norm_stderr": 0.048523658709391 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.43, "acc_stderr": 0.0497569851956243, "acc_norm": 0.43, "acc_norm_stderr": 0.0497569851956243 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.7225433526011561, "acc_stderr": 0.03414014007044036, "acc_norm": 0.7225433526011561, "acc_norm_stderr": 0.03414014007044036 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.45098039215686275, "acc_stderr": 0.04951218252396264, "acc_norm": 0.45098039215686275, "acc_norm_stderr": 0.04951218252396264 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.81, "acc_stderr": 0.03942772444036624, "acc_norm": 0.81, "acc_norm_stderr": 0.03942772444036624 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.7446808510638298, "acc_stderr": 0.028504856470514255, "acc_norm": 0.7446808510638298, "acc_norm_stderr": 0.028504856470514255 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5526315789473685, "acc_stderr": 0.04677473004491199, "acc_norm": 0.5526315789473685, "acc_norm_stderr": 0.04677473004491199 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.7724137931034483, "acc_stderr": 0.03493950380131184, "acc_norm": 0.7724137931034483, "acc_norm_stderr": 0.03493950380131184 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.582010582010582, "acc_stderr": 0.025402555503260912, "acc_norm": 0.582010582010582, "acc_norm_stderr": 0.025402555503260912 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.5158730158730159, "acc_stderr": 0.044698818540726076, "acc_norm": 0.5158730158730159, "acc_norm_stderr": 0.044698818540726076 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.52, "acc_stderr": 0.050211673156867795, "acc_norm": 0.52, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.8709677419354839, "acc_stderr": 0.01907088925479276, "acc_norm": 0.8709677419354839, "acc_norm_stderr": 0.01907088925479276 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.6059113300492611, "acc_stderr": 0.034381579670365446, "acc_norm": 0.6059113300492611, "acc_norm_stderr": 0.034381579670365446 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.75, "acc_stderr": 0.04351941398892446, "acc_norm": 0.75, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.8484848484848485, "acc_stderr": 0.027998073798781668, "acc_norm": 0.8484848484848485, "acc_norm_stderr": 0.027998073798781668 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.9191919191919192, "acc_stderr": 0.019417681889724536, "acc_norm": 0.9191919191919192, "acc_norm_stderr": 0.019417681889724536 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9792746113989638, "acc_stderr": 0.010281417011909029, "acc_norm": 0.9792746113989638, "acc_norm_stderr": 0.010281417011909029 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.7846153846153846, "acc_stderr": 0.020843034557462878, "acc_norm": 0.7846153846153846, "acc_norm_stderr": 0.020843034557462878 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.37407407407407406, "acc_stderr": 0.029502861128955286, "acc_norm": 0.37407407407407406, "acc_norm_stderr": 0.029502861128955286 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.8277310924369747, "acc_stderr": 0.02452866497130541, "acc_norm": 0.8277310924369747, "acc_norm_stderr": 0.02452866497130541 }, "harness|hendrycksTest-high_school_physics|5": { 
"acc": 0.4304635761589404, "acc_stderr": 0.04042809961395634, "acc_norm": 0.4304635761589404, "acc_norm_stderr": 0.04042809961395634 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.9119266055045872, "acc_stderr": 0.012150743719481655, "acc_norm": 0.9119266055045872, "acc_norm_stderr": 0.012150743719481655 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.625, "acc_stderr": 0.033016908987210894, "acc_norm": 0.625, "acc_norm_stderr": 0.033016908987210894 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.9019607843137255, "acc_stderr": 0.020871118455552097, "acc_norm": 0.9019607843137255, "acc_norm_stderr": 0.020871118455552097 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.9071729957805907, "acc_stderr": 0.01888975055095672, "acc_norm": 0.9071729957805907, "acc_norm_stderr": 0.01888975055095672 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.7847533632286996, "acc_stderr": 0.02758406660220827, "acc_norm": 0.7847533632286996, "acc_norm_stderr": 0.02758406660220827 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.8396946564885496, "acc_stderr": 0.03217829420744632, "acc_norm": 0.8396946564885496, "acc_norm_stderr": 0.03217829420744632 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8842975206611571, "acc_stderr": 0.029199802455622804, "acc_norm": 0.8842975206611571, "acc_norm_stderr": 0.029199802455622804 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8703703703703703, "acc_stderr": 0.03247224389917948, "acc_norm": 0.8703703703703703, "acc_norm_stderr": 0.03247224389917948 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.8588957055214724, "acc_stderr": 0.027351605518389752, "acc_norm": 0.8588957055214724, "acc_norm_stderr": 0.027351605518389752 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.5089285714285714, "acc_stderr": 0.04745033255489123, "acc_norm": 0.5089285714285714, "acc_norm_stderr": 0.04745033255489123 }, "harness|hendrycksTest-management|5": { "acc": 0.8640776699029126, "acc_stderr": 0.033932957297610096, "acc_norm": 0.8640776699029126, "acc_norm_stderr": 0.033932957297610096 }, "harness|hendrycksTest-marketing|5": { "acc": 0.9273504273504274, "acc_stderr": 0.017004368568132342, "acc_norm": 0.9273504273504274, "acc_norm_stderr": 0.017004368568132342 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.83, "acc_stderr": 0.03775251680686371, "acc_norm": 0.83, "acc_norm_stderr": 0.03775251680686371 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8876117496807152, "acc_stderr": 0.011294541351216533, "acc_norm": 0.8876117496807152, "acc_norm_stderr": 0.011294541351216533 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.8034682080924855, "acc_stderr": 0.021393961404363847, "acc_norm": 0.8034682080924855, "acc_norm_stderr": 0.021393961404363847 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.6022346368715084, "acc_stderr": 0.01636920497126298, "acc_norm": 0.6022346368715084, "acc_norm_stderr": 0.01636920497126298 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.8169934640522876, "acc_stderr": 0.022140767512880966, "acc_norm": 0.8169934640522876, "acc_norm_stderr": 0.022140767512880966 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.8135048231511254, "acc_stderr": 0.022122439772480764, "acc_norm": 0.8135048231511254, "acc_norm_stderr": 0.022122439772480764 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.8672839506172839, "acc_stderr": 0.01887735383957185, "acc_norm": 0.8672839506172839, "acc_norm_stderr": 0.01887735383957185 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.599290780141844, "acc_stderr": 0.029233465745573096, "acc_norm": 0.599290780141844, "acc_norm_stderr": 0.029233465745573096 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.5710560625814863, "acc_stderr": 0.012640625443067365, "acc_norm": 0.5710560625814863, "acc_norm_stderr": 0.012640625443067365 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.8125, "acc_stderr": 0.023709788253811766, "acc_norm": 0.8125, "acc_norm_stderr": 0.023709788253811766 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.8088235294117647, "acc_stderr": 0.01590829013627805, "acc_norm": 0.8088235294117647, "acc_norm_stderr": 0.01590829013627805 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6909090909090909, "acc_stderr": 0.044262946482000985, "acc_norm": 0.6909090909090909, "acc_norm_stderr": 0.044262946482000985 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.8367346938775511, "acc_stderr": 0.023661699177098608, "acc_norm": 0.8367346938775511, "acc_norm_stderr": 0.023661699177098608 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8905472636815921, "acc_stderr": 0.022076326101824657, "acc_norm": 0.8905472636815921, "acc_norm_stderr": 0.022076326101824657 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.91, "acc_stderr": 0.028762349126466125, "acc_norm": 0.91, "acc_norm_stderr": 0.028762349126466125 }, "harness|hendrycksTest-virology|5": { "acc": 0.5542168674698795, "acc_stderr": 0.03869543323472101, "acc_norm": 0.5542168674698795, "acc_norm_stderr": 0.03869543323472101 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8654970760233918, "acc_stderr": 0.026168221344662297, "acc_norm": 0.8654970760233918, "acc_norm_stderr": 0.026168221344662297 }, "harness|truthfulqa:mc|0": { "mc1": 0.4112607099143207, "mc1_stderr": 0.017225627083660867, "mc2": 0.5682495749148809, "mc2_stderr": 0.014775176850726521 }, "harness|winogrande|5": { "acc": 0.824782951854775, "acc_stderr": 0.010684179227706167 }, "harness|gsm8k|5": { "acc": 0.5951478392721758, "acc_stderr": 0.013520817666870515 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
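Beyond the per-task details loaded in the snippet above, the aggregated metrics can also be pulled on their own; a minimal sketch, assuming only the `results` configuration and `latest` split listed in this card's configuration metadata:

```python
from datasets import load_dataset

# The "latest" split of the "results" configuration always points to the most
# recent evaluation run (2024-02-02T07:46:14 for this repository).
results = load_dataset(
    "open-llm-leaderboard/details_DrNicefellow__ChatAllInOne-Yi-34B-200K-V1",
    "results",
    split="latest",
)
print(results.column_names)
```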
open-llm-leaderboard/details_DrNicefellow__ChatAllInOne-Yi-34B-200K-V1
[ "region:us" ]
2024-02-02T04:56:56+00:00
{"pretty_name": "Evaluation run of DrNicefellow/ChatAllInOne-Yi-34B-200K-V1", "dataset_summary": "Dataset automatically created during the evaluation run of model [DrNicefellow/ChatAllInOne-Yi-34B-200K-V1](https://huggingface.co/DrNicefellow/ChatAllInOne-Yi-34B-200K-V1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_DrNicefellow__ChatAllInOne-Yi-34B-200K-V1\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-02T07:46:14.329996](https://huggingface.co/datasets/open-llm-leaderboard/details_DrNicefellow__ChatAllInOne-Yi-34B-200K-V1/blob/main/results_2024-02-02T07-46-14.329996.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.7350413866691472,\n \"acc_stderr\": 0.029053323018774252,\n \"acc_norm\": 0.7399853641294432,\n \"acc_norm_stderr\": 0.029602382625034382,\n \"mc1\": 0.4112607099143207,\n \"mc1_stderr\": 0.017225627083660867,\n \"mc2\": 0.5682495749148809,\n \"mc2_stderr\": 0.014775176850726521\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.621160409556314,\n \"acc_stderr\": 0.014175915490000326,\n \"acc_norm\": 0.659556313993174,\n \"acc_norm_stderr\": 0.01384746051889298\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6427006572395937,\n \"acc_stderr\": 0.004782246931195002,\n \"acc_norm\": 0.8458474407488548,\n \"acc_norm_stderr\": 0.0036035695286784127\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.46,\n \"acc_stderr\": 0.05009082659620332,\n \"acc_norm\": 0.46,\n \"acc_norm_stderr\": 0.05009082659620332\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.7185185185185186,\n \"acc_stderr\": 0.038850042458002526,\n \"acc_norm\": 0.7185185185185186,\n \"acc_norm_stderr\": 0.038850042458002526\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.875,\n \"acc_stderr\": 0.026913523521537846,\n \"acc_norm\": 0.875,\n \"acc_norm_stderr\": 0.026913523521537846\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.76,\n \"acc_stderr\": 0.04292346959909283,\n \"acc_norm\": 0.76,\n \"acc_norm_stderr\": 0.04292346959909283\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.8113207547169812,\n \"acc_stderr\": 0.024079995130062253,\n \"acc_norm\": 0.8113207547169812,\n \"acc_norm_stderr\": 0.024079995130062253\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.8819444444444444,\n \"acc_stderr\": 0.026983346503309375,\n \"acc_norm\": 0.8819444444444444,\n \"acc_norm_stderr\": 0.026983346503309375\n },\n 
\"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.48,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.48,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.63,\n \"acc_stderr\": 0.048523658709391,\n \"acc_norm\": 0.63,\n \"acc_norm_stderr\": 0.048523658709391\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.43,\n \"acc_stderr\": 0.0497569851956243,\n \"acc_norm\": 0.43,\n \"acc_norm_stderr\": 0.0497569851956243\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.7225433526011561,\n \"acc_stderr\": 0.03414014007044036,\n \"acc_norm\": 0.7225433526011561,\n \"acc_norm_stderr\": 0.03414014007044036\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.45098039215686275,\n \"acc_stderr\": 0.04951218252396264,\n \"acc_norm\": 0.45098039215686275,\n \"acc_norm_stderr\": 0.04951218252396264\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.81,\n \"acc_stderr\": 0.03942772444036624,\n \"acc_norm\": 0.81,\n \"acc_norm_stderr\": 0.03942772444036624\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.7446808510638298,\n \"acc_stderr\": 0.028504856470514255,\n \"acc_norm\": 0.7446808510638298,\n \"acc_norm_stderr\": 0.028504856470514255\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5526315789473685,\n \"acc_stderr\": 0.04677473004491199,\n \"acc_norm\": 0.5526315789473685,\n \"acc_norm_stderr\": 0.04677473004491199\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.7724137931034483,\n \"acc_stderr\": 0.03493950380131184,\n \"acc_norm\": 0.7724137931034483,\n \"acc_norm_stderr\": 0.03493950380131184\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.582010582010582,\n \"acc_stderr\": 0.025402555503260912,\n \"acc_norm\": 0.582010582010582,\n \"acc_norm_stderr\": 0.025402555503260912\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.5158730158730159,\n \"acc_stderr\": 0.044698818540726076,\n \"acc_norm\": 0.5158730158730159,\n \"acc_norm_stderr\": 0.044698818540726076\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.52,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.52,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.8709677419354839,\n \"acc_stderr\": 0.01907088925479276,\n \"acc_norm\": 0.8709677419354839,\n \"acc_norm_stderr\": 0.01907088925479276\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.6059113300492611,\n \"acc_stderr\": 0.034381579670365446,\n \"acc_norm\": 0.6059113300492611,\n \"acc_norm_stderr\": 0.034381579670365446\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.8484848484848485,\n \"acc_stderr\": 0.027998073798781668,\n \"acc_norm\": 0.8484848484848485,\n \"acc_norm_stderr\": 0.027998073798781668\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.9191919191919192,\n \"acc_stderr\": 0.019417681889724536,\n \"acc_norm\": 0.9191919191919192,\n \"acc_norm_stderr\": 0.019417681889724536\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9792746113989638,\n \"acc_stderr\": 0.010281417011909029,\n \"acc_norm\": 0.9792746113989638,\n 
\"acc_norm_stderr\": 0.010281417011909029\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.7846153846153846,\n \"acc_stderr\": 0.020843034557462878,\n \"acc_norm\": 0.7846153846153846,\n \"acc_norm_stderr\": 0.020843034557462878\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.37407407407407406,\n \"acc_stderr\": 0.029502861128955286,\n \"acc_norm\": 0.37407407407407406,\n \"acc_norm_stderr\": 0.029502861128955286\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.8277310924369747,\n \"acc_stderr\": 0.02452866497130541,\n \"acc_norm\": 0.8277310924369747,\n \"acc_norm_stderr\": 0.02452866497130541\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.4304635761589404,\n \"acc_stderr\": 0.04042809961395634,\n \"acc_norm\": 0.4304635761589404,\n \"acc_norm_stderr\": 0.04042809961395634\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.9119266055045872,\n \"acc_stderr\": 0.012150743719481655,\n \"acc_norm\": 0.9119266055045872,\n \"acc_norm_stderr\": 0.012150743719481655\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.625,\n \"acc_stderr\": 0.033016908987210894,\n \"acc_norm\": 0.625,\n \"acc_norm_stderr\": 0.033016908987210894\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.9019607843137255,\n \"acc_stderr\": 0.020871118455552097,\n \"acc_norm\": 0.9019607843137255,\n \"acc_norm_stderr\": 0.020871118455552097\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.9071729957805907,\n \"acc_stderr\": 0.01888975055095672,\n \"acc_norm\": 0.9071729957805907,\n \"acc_norm_stderr\": 0.01888975055095672\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.7847533632286996,\n \"acc_stderr\": 0.02758406660220827,\n \"acc_norm\": 0.7847533632286996,\n \"acc_norm_stderr\": 0.02758406660220827\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.8396946564885496,\n \"acc_stderr\": 0.03217829420744632,\n \"acc_norm\": 0.8396946564885496,\n \"acc_norm_stderr\": 0.03217829420744632\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8842975206611571,\n \"acc_stderr\": 0.029199802455622804,\n \"acc_norm\": 0.8842975206611571,\n \"acc_norm_stderr\": 0.029199802455622804\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8703703703703703,\n \"acc_stderr\": 0.03247224389917948,\n \"acc_norm\": 0.8703703703703703,\n \"acc_norm_stderr\": 0.03247224389917948\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.8588957055214724,\n \"acc_stderr\": 0.027351605518389752,\n \"acc_norm\": 0.8588957055214724,\n \"acc_norm_stderr\": 0.027351605518389752\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.5089285714285714,\n \"acc_stderr\": 0.04745033255489123,\n \"acc_norm\": 0.5089285714285714,\n \"acc_norm_stderr\": 0.04745033255489123\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8640776699029126,\n \"acc_stderr\": 0.033932957297610096,\n \"acc_norm\": 0.8640776699029126,\n \"acc_norm_stderr\": 0.033932957297610096\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.9273504273504274,\n \"acc_stderr\": 0.017004368568132342,\n \"acc_norm\": 0.9273504273504274,\n \"acc_norm_stderr\": 0.017004368568132342\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.83,\n \"acc_stderr\": 0.03775251680686371,\n \"acc_norm\": 0.83,\n \"acc_norm_stderr\": 0.03775251680686371\n },\n 
\"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8876117496807152,\n \"acc_stderr\": 0.011294541351216533,\n \"acc_norm\": 0.8876117496807152,\n \"acc_norm_stderr\": 0.011294541351216533\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.8034682080924855,\n \"acc_stderr\": 0.021393961404363847,\n \"acc_norm\": 0.8034682080924855,\n \"acc_norm_stderr\": 0.021393961404363847\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.6022346368715084,\n \"acc_stderr\": 0.01636920497126298,\n \"acc_norm\": 0.6022346368715084,\n \"acc_norm_stderr\": 0.01636920497126298\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.8169934640522876,\n \"acc_stderr\": 0.022140767512880966,\n \"acc_norm\": 0.8169934640522876,\n \"acc_norm_stderr\": 0.022140767512880966\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.8135048231511254,\n \"acc_stderr\": 0.022122439772480764,\n \"acc_norm\": 0.8135048231511254,\n \"acc_norm_stderr\": 0.022122439772480764\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.8672839506172839,\n \"acc_stderr\": 0.01887735383957185,\n \"acc_norm\": 0.8672839506172839,\n \"acc_norm_stderr\": 0.01887735383957185\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.599290780141844,\n \"acc_stderr\": 0.029233465745573096,\n \"acc_norm\": 0.599290780141844,\n \"acc_norm_stderr\": 0.029233465745573096\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.5710560625814863,\n \"acc_stderr\": 0.012640625443067365,\n \"acc_norm\": 0.5710560625814863,\n \"acc_norm_stderr\": 0.012640625443067365\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.8125,\n \"acc_stderr\": 0.023709788253811766,\n \"acc_norm\": 0.8125,\n \"acc_norm_stderr\": 0.023709788253811766\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.8088235294117647,\n \"acc_stderr\": 0.01590829013627805,\n \"acc_norm\": 0.8088235294117647,\n \"acc_norm_stderr\": 0.01590829013627805\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6909090909090909,\n \"acc_stderr\": 0.044262946482000985,\n \"acc_norm\": 0.6909090909090909,\n \"acc_norm_stderr\": 0.044262946482000985\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.8367346938775511,\n \"acc_stderr\": 0.023661699177098608,\n \"acc_norm\": 0.8367346938775511,\n \"acc_norm_stderr\": 0.023661699177098608\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8905472636815921,\n \"acc_stderr\": 0.022076326101824657,\n \"acc_norm\": 0.8905472636815921,\n \"acc_norm_stderr\": 0.022076326101824657\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.91,\n \"acc_stderr\": 0.028762349126466125,\n \"acc_norm\": 0.91,\n \"acc_norm_stderr\": 0.028762349126466125\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5542168674698795,\n \"acc_stderr\": 0.03869543323472101,\n \"acc_norm\": 0.5542168674698795,\n \"acc_norm_stderr\": 0.03869543323472101\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8654970760233918,\n \"acc_stderr\": 0.026168221344662297,\n \"acc_norm\": 0.8654970760233918,\n \"acc_norm_stderr\": 0.026168221344662297\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.4112607099143207,\n \"mc1_stderr\": 0.017225627083660867,\n \"mc2\": 0.5682495749148809,\n \"mc2_stderr\": 0.014775176850726521\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.824782951854775,\n \"acc_stderr\": 0.010684179227706167\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.5951478392721758,\n 
\"acc_stderr\": 0.013520817666870515\n }\n}\n```", "repo_url": "https://huggingface.co/DrNicefellow/ChatAllInOne-Yi-34B-200K-V1", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_02T04_54_39.793108", "path": ["**/details_harness|arc:challenge|25_2024-02-02T04-54-39.793108.parquet"]}, {"split": "2024_02_02T07_46_14.329996", "path": ["**/details_harness|arc:challenge|25_2024-02-02T07-46-14.329996.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-02T07-46-14.329996.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_02T04_54_39.793108", "path": ["**/details_harness|gsm8k|5_2024-02-02T04-54-39.793108.parquet"]}, {"split": "2024_02_02T07_46_14.329996", "path": ["**/details_harness|gsm8k|5_2024-02-02T07-46-14.329996.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-02T07-46-14.329996.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_02T04_54_39.793108", "path": ["**/details_harness|hellaswag|10_2024-02-02T04-54-39.793108.parquet"]}, {"split": "2024_02_02T07_46_14.329996", "path": ["**/details_harness|hellaswag|10_2024-02-02T07-46-14.329996.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-02T07-46-14.329996.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_02T04_54_39.793108", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T04-54-39.793108.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T04-54-39.793108.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T04-54-39.793108.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T04-54-39.793108.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T04-54-39.793108.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T04-54-39.793108.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T04-54-39.793108.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T04-54-39.793108.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T04-54-39.793108.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T04-54-39.793108.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T04-54-39.793108.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T04-54-39.793108.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T04-54-39.793108.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T04-54-39.793108.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T04-54-39.793108.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T04-54-39.793108.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T04-54-39.793108.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T04-54-39.793108.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T04-54-39.793108.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T04-54-39.793108.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T04-54-39.793108.parquet", 
"**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T04-54-39.793108.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T04-54-39.793108.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T04-54-39.793108.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T04-54-39.793108.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T04-54-39.793108.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T04-54-39.793108.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T04-54-39.793108.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T04-54-39.793108.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T04-54-39.793108.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T04-54-39.793108.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T04-54-39.793108.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T04-54-39.793108.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T04-54-39.793108.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T04-54-39.793108.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T04-54-39.793108.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T04-54-39.793108.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T04-54-39.793108.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T04-54-39.793108.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T04-54-39.793108.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T04-54-39.793108.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T04-54-39.793108.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T04-54-39.793108.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T04-54-39.793108.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T04-54-39.793108.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-02T04-54-39.793108.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-02T04-54-39.793108.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T04-54-39.793108.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T04-54-39.793108.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T04-54-39.793108.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T04-54-39.793108.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T04-54-39.793108.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T04-54-39.793108.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T04-54-39.793108.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T04-54-39.793108.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T04-54-39.793108.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T04-54-39.793108.parquet"]}, {"split": "2024_02_02T07_46_14.329996", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T07-46-14.329996.parquet", 
"**/details_harness|hendrycksTest-astronomy|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T07-46-14.329996.parquet", 
"**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T07-46-14.329996.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T07-46-14.329996.parquet", 
"**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T07-46-14.329996.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T07-46-14.329996.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_02T04_54_39.793108", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T04-54-39.793108.parquet"]}, {"split": "2024_02_02T07_46_14.329996", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T07-46-14.329996.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T07-46-14.329996.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_02T04_54_39.793108", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T04-54-39.793108.parquet"]}, {"split": "2024_02_02T07_46_14.329996", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T07-46-14.329996.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T07-46-14.329996.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_02T04_54_39.793108", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T04-54-39.793108.parquet"]}, {"split": "2024_02_02T07_46_14.329996", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T07-46-14.329996.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T07-46-14.329996.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_02T04_54_39.793108", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T04-54-39.793108.parquet"]}, {"split": "2024_02_02T07_46_14.329996", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T07-46-14.329996.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T07-46-14.329996.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_02T04_54_39.793108", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T04-54-39.793108.parquet"]}, {"split": "2024_02_02T07_46_14.329996", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T07-46-14.329996.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T07-46-14.329996.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_02T04_54_39.793108", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T04-54-39.793108.parquet"]}, {"split": "2024_02_02T07_46_14.329996", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T07-46-14.329996.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T07-46-14.329996.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_02T04_54_39.793108", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T04-54-39.793108.parquet"]}, {"split": "2024_02_02T07_46_14.329996", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T07-46-14.329996.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T07-46-14.329996.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_02_02T04_54_39.793108", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T04-54-39.793108.parquet"]}, {"split": "2024_02_02T07_46_14.329996", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T07-46-14.329996.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T07-46-14.329996.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": 
"2024_02_02T04_54_39.793108", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T04-54-39.793108.parquet"]}, {"split": "2024_02_02T07_46_14.329996", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T07-46-14.329996.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T07-46-14.329996.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_02T04_54_39.793108", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T04-54-39.793108.parquet"]}, {"split": "2024_02_02T07_46_14.329996", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T07-46-14.329996.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T07-46-14.329996.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_02T04_54_39.793108", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T04-54-39.793108.parquet"]}, {"split": "2024_02_02T07_46_14.329996", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T07-46-14.329996.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T07-46-14.329996.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_02T04_54_39.793108", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T04-54-39.793108.parquet"]}, {"split": "2024_02_02T07_46_14.329996", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T07-46-14.329996.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T07-46-14.329996.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_02T04_54_39.793108", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T04-54-39.793108.parquet"]}, {"split": "2024_02_02T07_46_14.329996", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T07-46-14.329996.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T07-46-14.329996.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_02T04_54_39.793108", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T04-54-39.793108.parquet"]}, {"split": "2024_02_02T07_46_14.329996", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T07-46-14.329996.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T07-46-14.329996.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_02T04_54_39.793108", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T04-54-39.793108.parquet"]}, {"split": "2024_02_02T07_46_14.329996", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T07-46-14.329996.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T07-46-14.329996.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_02T04_54_39.793108", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T04-54-39.793108.parquet"]}, {"split": 
"2024_02_02T07_46_14.329996", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T07-46-14.329996.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T07-46-14.329996.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_02T04_54_39.793108", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T04-54-39.793108.parquet"]}, {"split": "2024_02_02T07_46_14.329996", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T07-46-14.329996.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T07-46-14.329996.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_02T04_54_39.793108", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T04-54-39.793108.parquet"]}, {"split": "2024_02_02T07_46_14.329996", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T07-46-14.329996.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T07-46-14.329996.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_02T04_54_39.793108", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T04-54-39.793108.parquet"]}, {"split": "2024_02_02T07_46_14.329996", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T07-46-14.329996.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T07-46-14.329996.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_02T04_54_39.793108", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T04-54-39.793108.parquet"]}, {"split": "2024_02_02T07_46_14.329996", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T07-46-14.329996.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T07-46-14.329996.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_02T04_54_39.793108", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T04-54-39.793108.parquet"]}, {"split": "2024_02_02T07_46_14.329996", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T07-46-14.329996.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T07-46-14.329996.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_02T04_54_39.793108", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T04-54-39.793108.parquet"]}, {"split": "2024_02_02T07_46_14.329996", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T07-46-14.329996.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T07-46-14.329996.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_02T04_54_39.793108", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T04-54-39.793108.parquet"]}, {"split": "2024_02_02T07_46_14.329996", "path": 
["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T07-46-14.329996.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T07-46-14.329996.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_02T04_54_39.793108", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T04-54-39.793108.parquet"]}, {"split": "2024_02_02T07_46_14.329996", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T07-46-14.329996.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T07-46-14.329996.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_02T04_54_39.793108", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T04-54-39.793108.parquet"]}, {"split": "2024_02_02T07_46_14.329996", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T07-46-14.329996.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T07-46-14.329996.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_02T04_54_39.793108", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T04-54-39.793108.parquet"]}, {"split": "2024_02_02T07_46_14.329996", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T07-46-14.329996.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T07-46-14.329996.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_02T04_54_39.793108", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T04-54-39.793108.parquet"]}, {"split": "2024_02_02T07_46_14.329996", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T07-46-14.329996.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T07-46-14.329996.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_02T04_54_39.793108", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T04-54-39.793108.parquet"]}, {"split": "2024_02_02T07_46_14.329996", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T07-46-14.329996.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T07-46-14.329996.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_02T04_54_39.793108", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T04-54-39.793108.parquet"]}, {"split": "2024_02_02T07_46_14.329996", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T07-46-14.329996.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T07-46-14.329996.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_02T04_54_39.793108", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T04-54-39.793108.parquet"]}, 
{"split": "2024_02_02T07_46_14.329996", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T07-46-14.329996.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T07-46-14.329996.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_02T04_54_39.793108", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T04-54-39.793108.parquet"]}, {"split": "2024_02_02T07_46_14.329996", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T07-46-14.329996.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T07-46-14.329996.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_02T04_54_39.793108", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T04-54-39.793108.parquet"]}, {"split": "2024_02_02T07_46_14.329996", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T07-46-14.329996.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T07-46-14.329996.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_02T04_54_39.793108", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T04-54-39.793108.parquet"]}, {"split": "2024_02_02T07_46_14.329996", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T07-46-14.329996.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T07-46-14.329996.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_02T04_54_39.793108", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T04-54-39.793108.parquet"]}, {"split": "2024_02_02T07_46_14.329996", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T07-46-14.329996.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T07-46-14.329996.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_02T04_54_39.793108", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T04-54-39.793108.parquet"]}, {"split": "2024_02_02T07_46_14.329996", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T07-46-14.329996.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T07-46-14.329996.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_02T04_54_39.793108", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T04-54-39.793108.parquet"]}, {"split": "2024_02_02T07_46_14.329996", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T07-46-14.329996.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T07-46-14.329996.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_02T04_54_39.793108", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T04-54-39.793108.parquet"]}, {"split": "2024_02_02T07_46_14.329996", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T07-46-14.329996.parquet"]}, {"split": 
"latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T07-46-14.329996.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_02_02T04_54_39.793108", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T04-54-39.793108.parquet"]}, {"split": "2024_02_02T07_46_14.329996", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T07-46-14.329996.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T07-46-14.329996.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_02T04_54_39.793108", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T04-54-39.793108.parquet"]}, {"split": "2024_02_02T07_46_14.329996", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T07-46-14.329996.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T07-46-14.329996.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_02T04_54_39.793108", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T04-54-39.793108.parquet"]}, {"split": "2024_02_02T07_46_14.329996", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T07-46-14.329996.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T07-46-14.329996.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_02T04_54_39.793108", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T04-54-39.793108.parquet"]}, {"split": "2024_02_02T07_46_14.329996", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T07-46-14.329996.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T07-46-14.329996.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_02T04_54_39.793108", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T04-54-39.793108.parquet"]}, {"split": "2024_02_02T07_46_14.329996", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T07-46-14.329996.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T07-46-14.329996.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_02T04_54_39.793108", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T04-54-39.793108.parquet"]}, {"split": "2024_02_02T07_46_14.329996", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T07-46-14.329996.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T07-46-14.329996.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_02T04_54_39.793108", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T04-54-39.793108.parquet"]}, {"split": "2024_02_02T07_46_14.329996", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T07-46-14.329996.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T07-46-14.329996.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_02T04_54_39.793108", "path": 
["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T04-54-39.793108.parquet"]}, {"split": "2024_02_02T07_46_14.329996", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T07-46-14.329996.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T07-46-14.329996.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_02T04_54_39.793108", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T04-54-39.793108.parquet"]}, {"split": "2024_02_02T07_46_14.329996", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T07-46-14.329996.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T07-46-14.329996.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_02T04_54_39.793108", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T04-54-39.793108.parquet"]}, {"split": "2024_02_02T07_46_14.329996", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T07-46-14.329996.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T07-46-14.329996.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_02T04_54_39.793108", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T04-54-39.793108.parquet"]}, {"split": "2024_02_02T07_46_14.329996", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T07-46-14.329996.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T07-46-14.329996.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_02T04_54_39.793108", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T04-54-39.793108.parquet"]}, {"split": "2024_02_02T07_46_14.329996", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T07-46-14.329996.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T07-46-14.329996.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_02T04_54_39.793108", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T04-54-39.793108.parquet"]}, {"split": "2024_02_02T07_46_14.329996", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T07-46-14.329996.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T07-46-14.329996.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_02T04_54_39.793108", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T04-54-39.793108.parquet"]}, {"split": "2024_02_02T07_46_14.329996", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T07-46-14.329996.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T07-46-14.329996.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_02T04_54_39.793108", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T04-54-39.793108.parquet"]}, {"split": "2024_02_02T07_46_14.329996", "path": 
["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T07-46-14.329996.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T07-46-14.329996.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_02T04_54_39.793108", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T04-54-39.793108.parquet"]}, {"split": "2024_02_02T07_46_14.329996", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T07-46-14.329996.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T07-46-14.329996.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_02T04_54_39.793108", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T04-54-39.793108.parquet"]}, {"split": "2024_02_02T07_46_14.329996", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T07-46-14.329996.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T07-46-14.329996.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_02T04_54_39.793108", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T04-54-39.793108.parquet"]}, {"split": "2024_02_02T07_46_14.329996", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T07-46-14.329996.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T07-46-14.329996.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_02T04_54_39.793108", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T04-54-39.793108.parquet"]}, {"split": "2024_02_02T07_46_14.329996", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T07-46-14.329996.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T07-46-14.329996.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_02T04_54_39.793108", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T04-54-39.793108.parquet"]}, {"split": "2024_02_02T07_46_14.329996", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T07-46-14.329996.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T07-46-14.329996.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_02T04_54_39.793108", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T04-54-39.793108.parquet"]}, {"split": "2024_02_02T07_46_14.329996", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T07-46-14.329996.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T07-46-14.329996.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_02T04_54_39.793108", "path": ["**/details_harness|winogrande|5_2024-02-02T04-54-39.793108.parquet"]}, {"split": "2024_02_02T07_46_14.329996", "path": ["**/details_harness|winogrande|5_2024-02-02T07-46-14.329996.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-02-02T07-46-14.329996.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_02_02T04_54_39.793108", "path": ["results_2024-02-02T04-54-39.793108.parquet"]}, {"split": "2024_02_02T07_46_14.329996", "path": 
["results_2024-02-02T07-46-14.329996.parquet"]}, {"split": "latest", "path": ["results_2024-02-02T07-46-14.329996.parquet"]}]}]}
2024-02-02T07:48:50+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of DrNicefellow/ChatAllInOne-Yi-34B-200K-V1 Dataset automatically created during the evaluation run of model DrNicefellow/ChatAllInOne-Yi-34B-200K-V1 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following (see the sketch after this card text): ## Latest results These are the latest results from run 2024-02-02T07:46:14.329996 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
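The card text above refers to a loading snippet that was dropped when the text was flattened. A minimal reconstruction follows; the repository id is an assumption inferred from the `open-llm-leaderboard/details_<org>__<model>` naming convention used by the sibling cards in this dump, not quoted from the original card.

```python
from datasets import load_dataset

# Assumed repo id, following the open-llm-leaderboard/details_<org>__<model>
# convention seen in the other cards; config and split mirror the template.
data = load_dataset(
    "open-llm-leaderboard/details_DrNicefellow__ChatAllInOne-Yi-34B-200K-V1",
    "harness_winogrande_5",
    split="train",
)
```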
[ "# Dataset Card for Evaluation run of DrNicefellow/ChatAllInOne-Yi-34B-200K-V1\n\n\n\nDataset automatically created during the evaluation run of model DrNicefellow/ChatAllInOne-Yi-34B-200K-V1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T07:46:14.329996(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of DrNicefellow/ChatAllInOne-Yi-34B-200K-V1\n\n\n\nDataset automatically created during the evaluation run of model DrNicefellow/ChatAllInOne-Yi-34B-200K-V1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T07:46:14.329996(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
004e9b06ed5fe5f43032878c36006744848d14b8
# Dataset Card for Evaluation run of DreadPoor/Bageluccine-2-7B-slerp <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [DreadPoor/Bageluccine-2-7B-slerp](https://huggingface.co/DreadPoor/Bageluccine-2-7B-slerp) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_DreadPoor__Bageluccine-2-7B-slerp", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-02-02T05:08:22.463551](https://huggingface.co/datasets/open-llm-leaderboard/details_DreadPoor__Bageluccine-2-7B-slerp/blob/main/results_2024-02-02T05-08-22.463551.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6228426601871014, "acc_stderr": 0.03278261103107283, "acc_norm": 0.6269105911488114, "acc_norm_stderr": 0.033434918339746184, "mc1": 0.49571603427172584, "mc1_stderr": 0.017502858577371258, "mc2": 0.6557212567877906, "mc2_stderr": 0.01542070502166937 }, "harness|arc:challenge|25": { "acc": 0.6254266211604096, "acc_stderr": 0.01414419347189345, "acc_norm": 0.6638225255972696, "acc_norm_stderr": 0.013804855026205765 }, "harness|hellaswag|10": { "acc": 0.673272256522605, "acc_stderr": 0.004680582263524271, "acc_norm": 0.8551085441147181, "acc_norm_stderr": 0.003512719952354536 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.5777777777777777, "acc_stderr": 0.04266763404099582, "acc_norm": 0.5777777777777777, "acc_norm_stderr": 0.04266763404099582 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6907894736842105, "acc_stderr": 0.037610708698674805, "acc_norm": 0.6907894736842105, "acc_norm_stderr": 0.037610708698674805 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.54, "acc_stderr": 0.05009082659620332, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620332 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6981132075471698, "acc_stderr": 0.028254200344438655, "acc_norm": 0.6981132075471698, "acc_norm_stderr": 0.028254200344438655 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7222222222222222, "acc_stderr": 0.037455547914624555, "acc_norm": 0.7222222222222222, "acc_norm_stderr": 0.037455547914624555 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.44, "acc_stderr": 0.04988876515698589, "acc_norm": 0.44, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, 
"acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6184971098265896, "acc_stderr": 0.03703851193099521, "acc_norm": 0.6184971098265896, "acc_norm_stderr": 0.03703851193099521 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.3333333333333333, "acc_stderr": 0.04690650298201942, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.04690650298201942 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.73, "acc_stderr": 0.0446196043338474, "acc_norm": 0.73, "acc_norm_stderr": 0.0446196043338474 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5276595744680851, "acc_stderr": 0.03263597118409769, "acc_norm": 0.5276595744680851, "acc_norm_stderr": 0.03263597118409769 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.4649122807017544, "acc_stderr": 0.046920083813689104, "acc_norm": 0.4649122807017544, "acc_norm_stderr": 0.046920083813689104 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5793103448275863, "acc_stderr": 0.0411391498118926, "acc_norm": 0.5793103448275863, "acc_norm_stderr": 0.0411391498118926 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.4021164021164021, "acc_stderr": 0.02525303255499769, "acc_norm": 0.4021164021164021, "acc_norm_stderr": 0.02525303255499769 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.4603174603174603, "acc_stderr": 0.04458029125470973, "acc_norm": 0.4603174603174603, "acc_norm_stderr": 0.04458029125470973 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.43, "acc_stderr": 0.049756985195624284, "acc_norm": 0.43, "acc_norm_stderr": 0.049756985195624284 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.632258064516129, "acc_stderr": 0.027430866579973467, "acc_norm": 0.632258064516129, "acc_norm_stderr": 0.027430866579973467 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5467980295566502, "acc_stderr": 0.03502544650845872, "acc_norm": 0.5467980295566502, "acc_norm_stderr": 0.03502544650845872 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.65, "acc_stderr": 0.047937248544110196, "acc_norm": 0.65, "acc_norm_stderr": 0.047937248544110196 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7696969696969697, "acc_stderr": 0.032876667586034906, "acc_norm": 0.7696969696969697, "acc_norm_stderr": 0.032876667586034906 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7929292929292929, "acc_stderr": 0.02886977846026704, "acc_norm": 0.7929292929292929, "acc_norm_stderr": 0.02886977846026704 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.844559585492228, "acc_stderr": 0.026148483469153324, "acc_norm": 0.844559585492228, "acc_norm_stderr": 0.026148483469153324 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6153846153846154, "acc_stderr": 0.024666744915187215, "acc_norm": 0.6153846153846154, "acc_norm_stderr": 0.024666744915187215 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.34814814814814815, "acc_stderr": 0.029045600290616258, "acc_norm": 0.34814814814814815, "acc_norm_stderr": 0.029045600290616258 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6680672268907563, "acc_stderr": 0.030588697013783642, "acc_norm": 0.6680672268907563, "acc_norm_stderr": 0.030588697013783642 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 
0.33774834437086093, "acc_stderr": 0.038615575462551684, "acc_norm": 0.33774834437086093, "acc_norm_stderr": 0.038615575462551684 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8238532110091743, "acc_stderr": 0.016332882393431367, "acc_norm": 0.8238532110091743, "acc_norm_stderr": 0.016332882393431367 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4861111111111111, "acc_stderr": 0.03408655867977748, "acc_norm": 0.4861111111111111, "acc_norm_stderr": 0.03408655867977748 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7941176470588235, "acc_stderr": 0.02837944945158866, "acc_norm": 0.7941176470588235, "acc_norm_stderr": 0.02837944945158866 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7805907172995781, "acc_stderr": 0.026939106581553945, "acc_norm": 0.7805907172995781, "acc_norm_stderr": 0.026939106581553945 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6502242152466368, "acc_stderr": 0.03200736719484503, "acc_norm": 0.6502242152466368, "acc_norm_stderr": 0.03200736719484503 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7557251908396947, "acc_stderr": 0.037683359597287434, "acc_norm": 0.7557251908396947, "acc_norm_stderr": 0.037683359597287434 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8347107438016529, "acc_stderr": 0.03390780612972776, "acc_norm": 0.8347107438016529, "acc_norm_stderr": 0.03390780612972776 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7314814814814815, "acc_stderr": 0.042844679680521934, "acc_norm": 0.7314814814814815, "acc_norm_stderr": 0.042844679680521934 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7423312883435583, "acc_stderr": 0.03436150827846917, "acc_norm": 0.7423312883435583, "acc_norm_stderr": 0.03436150827846917 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4107142857142857, "acc_stderr": 0.04669510663875191, "acc_norm": 0.4107142857142857, "acc_norm_stderr": 0.04669510663875191 }, "harness|hendrycksTest-management|5": { "acc": 0.7669902912621359, "acc_stderr": 0.04185832598928315, "acc_norm": 0.7669902912621359, "acc_norm_stderr": 0.04185832598928315 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8846153846153846, "acc_stderr": 0.020930193185179333, "acc_norm": 0.8846153846153846, "acc_norm_stderr": 0.020930193185179333 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.72, "acc_stderr": 0.04512608598542128, "acc_norm": 0.72, "acc_norm_stderr": 0.04512608598542128 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8109833971902938, "acc_stderr": 0.014000791294407003, "acc_norm": 0.8109833971902938, "acc_norm_stderr": 0.014000791294407003 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7052023121387283, "acc_stderr": 0.024547617794803828, "acc_norm": 0.7052023121387283, "acc_norm_stderr": 0.024547617794803828 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4424581005586592, "acc_stderr": 0.016611393687268588, "acc_norm": 0.4424581005586592, "acc_norm_stderr": 0.016611393687268588 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7156862745098039, "acc_stderr": 0.025829163272757482, "acc_norm": 0.7156862745098039, "acc_norm_stderr": 0.025829163272757482 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6816720257234726, "acc_stderr": 0.02645722506781103, "acc_norm": 0.6816720257234726, "acc_norm_stderr": 0.02645722506781103 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.6944444444444444, "acc_stderr": 0.025630824975621348, "acc_norm": 0.6944444444444444, "acc_norm_stderr": 
0.025630824975621348 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.475177304964539, "acc_stderr": 0.029790719243829727, "acc_norm": 0.475177304964539, "acc_norm_stderr": 0.029790719243829727 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4517601043024772, "acc_stderr": 0.012710662233660245, "acc_norm": 0.4517601043024772, "acc_norm_stderr": 0.012710662233660245 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6764705882352942, "acc_stderr": 0.028418208619406755, "acc_norm": 0.6764705882352942, "acc_norm_stderr": 0.028418208619406755 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6715686274509803, "acc_stderr": 0.018999707383162673, "acc_norm": 0.6715686274509803, "acc_norm_stderr": 0.018999707383162673 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6909090909090909, "acc_stderr": 0.044262946482000985, "acc_norm": 0.6909090909090909, "acc_norm_stderr": 0.044262946482000985 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7306122448979592, "acc_stderr": 0.02840125202902294, "acc_norm": 0.7306122448979592, "acc_norm_stderr": 0.02840125202902294 }, "harness|hendrycksTest-sociology|5": { "acc": 0.6417910447761194, "acc_stderr": 0.03390393042268814, "acc_norm": 0.6417910447761194, "acc_norm_stderr": 0.03390393042268814 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.82, "acc_stderr": 0.038612291966536934, "acc_norm": 0.82, "acc_norm_stderr": 0.038612291966536934 }, "harness|hendrycksTest-virology|5": { "acc": 0.5, "acc_stderr": 0.03892494720807614, "acc_norm": 0.5, "acc_norm_stderr": 0.03892494720807614 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.847953216374269, "acc_stderr": 0.027539122889061463, "acc_norm": 0.847953216374269, "acc_norm_stderr": 0.027539122889061463 }, "harness|truthfulqa:mc|0": { "mc1": 0.49571603427172584, "mc1_stderr": 0.017502858577371258, "mc2": 0.6557212567877906, "mc2_stderr": 0.01542070502166937 }, "harness|winogrande|5": { "acc": 0.7687450670876085, "acc_stderr": 0.011850040124850508 }, "harness|gsm8k|5": { "acc": 0.45716451857467777, "acc_stderr": 0.01372184996870972 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
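In addition to the per-task configurations, the aggregated "results" configuration listed in this card's metadata can be loaded the same way. A minimal sketch, assuming only the config and split names that appear in the metadata below (the column layout of the aggregated table is not documented in this card):

```python
from datasets import load_dataset

# Aggregated metrics for this run; the "latest" split always aliases
# the newest results (config and split names taken from the metadata).
results = load_dataset(
    "open-llm-leaderboard/details_DreadPoor__Bageluccine-2-7B-slerp",
    "results",
    split="latest",
)
print(results[0])  # inspect the first aggregated-results row
```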
open-llm-leaderboard/details_DreadPoor__Bageluccine-2-7B-slerp
[ "region:us" ]
2024-02-02T05:10:41+00:00
{"pretty_name": "Evaluation run of DreadPoor/Bageluccine-2-7B-slerp", "dataset_summary": "Dataset automatically created during the evaluation run of model [DreadPoor/Bageluccine-2-7B-slerp](https://huggingface.co/DreadPoor/Bageluccine-2-7B-slerp) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_DreadPoor__Bageluccine-2-7B-slerp\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-02T05:08:22.463551](https://huggingface.co/datasets/open-llm-leaderboard/details_DreadPoor__Bageluccine-2-7B-slerp/blob/main/results_2024-02-02T05-08-22.463551.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6228426601871014,\n \"acc_stderr\": 0.03278261103107283,\n \"acc_norm\": 0.6269105911488114,\n \"acc_norm_stderr\": 0.033434918339746184,\n \"mc1\": 0.49571603427172584,\n \"mc1_stderr\": 0.017502858577371258,\n \"mc2\": 0.6557212567877906,\n \"mc2_stderr\": 0.01542070502166937\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6254266211604096,\n \"acc_stderr\": 0.01414419347189345,\n \"acc_norm\": 0.6638225255972696,\n \"acc_norm_stderr\": 0.013804855026205765\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.673272256522605,\n \"acc_stderr\": 0.004680582263524271,\n \"acc_norm\": 0.8551085441147181,\n \"acc_norm_stderr\": 0.003512719952354536\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5777777777777777,\n \"acc_stderr\": 0.04266763404099582,\n \"acc_norm\": 0.5777777777777777,\n \"acc_norm_stderr\": 0.04266763404099582\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6907894736842105,\n \"acc_stderr\": 0.037610708698674805,\n \"acc_norm\": 0.6907894736842105,\n \"acc_norm_stderr\": 0.037610708698674805\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.54,\n \"acc_stderr\": 0.05009082659620332,\n \"acc_norm\": 0.54,\n \"acc_norm_stderr\": 0.05009082659620332\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6981132075471698,\n \"acc_stderr\": 0.028254200344438655,\n \"acc_norm\": 0.6981132075471698,\n \"acc_norm_stderr\": 0.028254200344438655\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7222222222222222,\n \"acc_stderr\": 0.037455547914624555,\n \"acc_norm\": 0.7222222222222222,\n \"acc_norm_stderr\": 0.037455547914624555\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n 
\"acc\": 0.44,\n \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.44,\n \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.48,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.48,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.047258156262526045,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.047258156262526045\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6184971098265896,\n \"acc_stderr\": 0.03703851193099521,\n \"acc_norm\": 0.6184971098265896,\n \"acc_norm_stderr\": 0.03703851193099521\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.3333333333333333,\n \"acc_stderr\": 0.04690650298201942,\n \"acc_norm\": 0.3333333333333333,\n \"acc_norm_stderr\": 0.04690650298201942\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.73,\n \"acc_stderr\": 0.0446196043338474,\n \"acc_norm\": 0.73,\n \"acc_norm_stderr\": 0.0446196043338474\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5276595744680851,\n \"acc_stderr\": 0.03263597118409769,\n \"acc_norm\": 0.5276595744680851,\n \"acc_norm_stderr\": 0.03263597118409769\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.4649122807017544,\n \"acc_stderr\": 0.046920083813689104,\n \"acc_norm\": 0.4649122807017544,\n \"acc_norm_stderr\": 0.046920083813689104\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5793103448275863,\n \"acc_stderr\": 0.0411391498118926,\n \"acc_norm\": 0.5793103448275863,\n \"acc_norm_stderr\": 0.0411391498118926\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.4021164021164021,\n \"acc_stderr\": 0.02525303255499769,\n \"acc_norm\": 0.4021164021164021,\n \"acc_norm_stderr\": 0.02525303255499769\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4603174603174603,\n \"acc_stderr\": 0.04458029125470973,\n \"acc_norm\": 0.4603174603174603,\n \"acc_norm_stderr\": 0.04458029125470973\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.43,\n \"acc_stderr\": 0.049756985195624284,\n \"acc_norm\": 0.43,\n \"acc_norm_stderr\": 0.049756985195624284\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.632258064516129,\n \"acc_stderr\": 0.027430866579973467,\n \"acc_norm\": 0.632258064516129,\n \"acc_norm_stderr\": 0.027430866579973467\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5467980295566502,\n \"acc_stderr\": 0.03502544650845872,\n \"acc_norm\": 0.5467980295566502,\n \"acc_norm_stderr\": 0.03502544650845872\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.65,\n \"acc_stderr\": 0.047937248544110196,\n \"acc_norm\": 0.65,\n \"acc_norm_stderr\": 0.047937248544110196\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7696969696969697,\n \"acc_stderr\": 0.032876667586034906,\n \"acc_norm\": 0.7696969696969697,\n \"acc_norm_stderr\": 0.032876667586034906\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7929292929292929,\n \"acc_stderr\": 0.02886977846026704,\n \"acc_norm\": 0.7929292929292929,\n \"acc_norm_stderr\": 0.02886977846026704\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.844559585492228,\n \"acc_stderr\": 0.026148483469153324,\n \"acc_norm\": 0.844559585492228,\n \"acc_norm_stderr\": 0.026148483469153324\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6153846153846154,\n \"acc_stderr\": 0.024666744915187215,\n \"acc_norm\": 0.6153846153846154,\n \"acc_norm_stderr\": 0.024666744915187215\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.34814814814814815,\n \"acc_stderr\": 0.029045600290616258,\n \"acc_norm\": 0.34814814814814815,\n \"acc_norm_stderr\": 0.029045600290616258\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6680672268907563,\n \"acc_stderr\": 0.030588697013783642,\n \"acc_norm\": 0.6680672268907563,\n \"acc_norm_stderr\": 0.030588697013783642\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.33774834437086093,\n \"acc_stderr\": 0.038615575462551684,\n \"acc_norm\": 0.33774834437086093,\n \"acc_norm_stderr\": 0.038615575462551684\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8238532110091743,\n \"acc_stderr\": 0.016332882393431367,\n \"acc_norm\": 0.8238532110091743,\n \"acc_norm_stderr\": 0.016332882393431367\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.4861111111111111,\n \"acc_stderr\": 0.03408655867977748,\n \"acc_norm\": 0.4861111111111111,\n \"acc_norm_stderr\": 0.03408655867977748\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7941176470588235,\n \"acc_stderr\": 0.02837944945158866,\n \"acc_norm\": 0.7941176470588235,\n \"acc_norm_stderr\": 0.02837944945158866\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7805907172995781,\n \"acc_stderr\": 0.026939106581553945,\n \"acc_norm\": 0.7805907172995781,\n \"acc_norm_stderr\": 0.026939106581553945\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6502242152466368,\n \"acc_stderr\": 0.03200736719484503,\n \"acc_norm\": 0.6502242152466368,\n \"acc_norm_stderr\": 0.03200736719484503\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7557251908396947,\n \"acc_stderr\": 0.037683359597287434,\n \"acc_norm\": 0.7557251908396947,\n \"acc_norm_stderr\": 0.037683359597287434\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8347107438016529,\n \"acc_stderr\": 0.03390780612972776,\n \"acc_norm\": 0.8347107438016529,\n \"acc_norm_stderr\": 0.03390780612972776\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7314814814814815,\n \"acc_stderr\": 0.042844679680521934,\n \"acc_norm\": 0.7314814814814815,\n \"acc_norm_stderr\": 0.042844679680521934\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7423312883435583,\n \"acc_stderr\": 0.03436150827846917,\n \"acc_norm\": 0.7423312883435583,\n \"acc_norm_stderr\": 0.03436150827846917\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4107142857142857,\n \"acc_stderr\": 0.04669510663875191,\n \"acc_norm\": 0.4107142857142857,\n \"acc_norm_stderr\": 0.04669510663875191\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7669902912621359,\n \"acc_stderr\": 0.04185832598928315,\n \"acc_norm\": 0.7669902912621359,\n \"acc_norm_stderr\": 0.04185832598928315\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8846153846153846,\n \"acc_stderr\": 0.020930193185179333,\n \"acc_norm\": 0.8846153846153846,\n \"acc_norm_stderr\": 0.020930193185179333\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.04512608598542128,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.04512608598542128\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.8109833971902938,\n \"acc_stderr\": 0.014000791294407003,\n \"acc_norm\": 0.8109833971902938,\n \"acc_norm_stderr\": 0.014000791294407003\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7052023121387283,\n \"acc_stderr\": 0.024547617794803828,\n \"acc_norm\": 0.7052023121387283,\n \"acc_norm_stderr\": 0.024547617794803828\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4424581005586592,\n \"acc_stderr\": 0.016611393687268588,\n \"acc_norm\": 0.4424581005586592,\n \"acc_norm_stderr\": 0.016611393687268588\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7156862745098039,\n \"acc_stderr\": 0.025829163272757482,\n \"acc_norm\": 0.7156862745098039,\n \"acc_norm_stderr\": 0.025829163272757482\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6816720257234726,\n \"acc_stderr\": 0.02645722506781103,\n \"acc_norm\": 0.6816720257234726,\n \"acc_norm_stderr\": 0.02645722506781103\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.6944444444444444,\n \"acc_stderr\": 0.025630824975621348,\n \"acc_norm\": 0.6944444444444444,\n \"acc_norm_stderr\": 0.025630824975621348\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.475177304964539,\n \"acc_stderr\": 0.029790719243829727,\n \"acc_norm\": 0.475177304964539,\n \"acc_norm_stderr\": 0.029790719243829727\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4517601043024772,\n \"acc_stderr\": 0.012710662233660245,\n \"acc_norm\": 0.4517601043024772,\n \"acc_norm_stderr\": 0.012710662233660245\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6764705882352942,\n \"acc_stderr\": 0.028418208619406755,\n \"acc_norm\": 0.6764705882352942,\n \"acc_norm_stderr\": 0.028418208619406755\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6715686274509803,\n \"acc_stderr\": 0.018999707383162673,\n \"acc_norm\": 0.6715686274509803,\n \"acc_norm_stderr\": 0.018999707383162673\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6909090909090909,\n \"acc_stderr\": 0.044262946482000985,\n \"acc_norm\": 0.6909090909090909,\n \"acc_norm_stderr\": 0.044262946482000985\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7306122448979592,\n \"acc_stderr\": 0.02840125202902294,\n \"acc_norm\": 0.7306122448979592,\n \"acc_norm_stderr\": 0.02840125202902294\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.6417910447761194,\n \"acc_stderr\": 0.03390393042268814,\n \"acc_norm\": 0.6417910447761194,\n \"acc_norm_stderr\": 0.03390393042268814\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.82,\n \"acc_stderr\": 0.038612291966536934,\n \"acc_norm\": 0.82,\n \"acc_norm_stderr\": 0.038612291966536934\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.03892494720807614,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.03892494720807614\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.847953216374269,\n \"acc_stderr\": 0.027539122889061463,\n \"acc_norm\": 0.847953216374269,\n \"acc_norm_stderr\": 0.027539122889061463\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.49571603427172584,\n \"mc1_stderr\": 0.017502858577371258,\n \"mc2\": 0.6557212567877906,\n \"mc2_stderr\": 0.01542070502166937\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7687450670876085,\n \"acc_stderr\": 0.011850040124850508\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.45716451857467777,\n \"acc_stderr\": 0.01372184996870972\n }\n}\n```", "repo_url": 
"https://huggingface.co/DreadPoor/Bageluccine-2-7B-slerp", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_02T05_08_22.463551", "path": ["**/details_harness|arc:challenge|25_2024-02-02T05-08-22.463551.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-02T05-08-22.463551.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_02T05_08_22.463551", "path": ["**/details_harness|gsm8k|5_2024-02-02T05-08-22.463551.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-02T05-08-22.463551.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_02T05_08_22.463551", "path": ["**/details_harness|hellaswag|10_2024-02-02T05-08-22.463551.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-02T05-08-22.463551.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_02T05_08_22.463551", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T05-08-22.463551.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T05-08-22.463551.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T05-08-22.463551.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T05-08-22.463551.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T05-08-22.463551.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T05-08-22.463551.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_02T05_08_22.463551", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T05-08-22.463551.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T05-08-22.463551.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_02T05_08_22.463551", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T05-08-22.463551.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T05-08-22.463551.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_02T05_08_22.463551", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T05-08-22.463551.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T05-08-22.463551.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_02T05_08_22.463551", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T05-08-22.463551.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T05-08-22.463551.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_02T05_08_22.463551", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T05-08-22.463551.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T05-08-22.463551.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_02T05_08_22.463551", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T05-08-22.463551.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T05-08-22.463551.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_02T05_08_22.463551", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T05-08-22.463551.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T05-08-22.463551.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_02_02T05_08_22.463551", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T05-08-22.463551.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T05-08-22.463551.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_02T05_08_22.463551", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T05-08-22.463551.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T05-08-22.463551.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_02T05_08_22.463551", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T05-08-22.463551.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T05-08-22.463551.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_02T05_08_22.463551", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T05-08-22.463551.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T05-08-22.463551.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_02T05_08_22.463551", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T05-08-22.463551.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T05-08-22.463551.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_02T05_08_22.463551", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T05-08-22.463551.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T05-08-22.463551.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_02T05_08_22.463551", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T05-08-22.463551.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T05-08-22.463551.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_02T05_08_22.463551", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T05-08-22.463551.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T05-08-22.463551.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_02T05_08_22.463551", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T05-08-22.463551.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T05-08-22.463551.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_02T05_08_22.463551", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T05-08-22.463551.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T05-08-22.463551.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_02T05_08_22.463551", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T05-08-22.463551.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T05-08-22.463551.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_02T05_08_22.463551", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T05-08-22.463551.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T05-08-22.463551.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_02T05_08_22.463551", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T05-08-22.463551.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T05-08-22.463551.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_02T05_08_22.463551", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T05-08-22.463551.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T05-08-22.463551.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_02T05_08_22.463551", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T05-08-22.463551.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T05-08-22.463551.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_02T05_08_22.463551", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T05-08-22.463551.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T05-08-22.463551.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_02T05_08_22.463551", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T05-08-22.463551.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T05-08-22.463551.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_02T05_08_22.463551", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T05-08-22.463551.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T05-08-22.463551.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_02T05_08_22.463551", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T05-08-22.463551.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T05-08-22.463551.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_02T05_08_22.463551", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T05-08-22.463551.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T05-08-22.463551.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_02T05_08_22.463551", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T05-08-22.463551.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T05-08-22.463551.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_02T05_08_22.463551", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T05-08-22.463551.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T05-08-22.463551.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_02T05_08_22.463551", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T05-08-22.463551.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T05-08-22.463551.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_02T05_08_22.463551", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T05-08-22.463551.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T05-08-22.463551.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_02T05_08_22.463551", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T05-08-22.463551.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T05-08-22.463551.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_02T05_08_22.463551", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T05-08-22.463551.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T05-08-22.463551.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_02T05_08_22.463551", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T05-08-22.463551.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T05-08-22.463551.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_02T05_08_22.463551", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T05-08-22.463551.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T05-08-22.463551.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_02T05_08_22.463551", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T05-08-22.463551.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T05-08-22.463551.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_02T05_08_22.463551", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T05-08-22.463551.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T05-08-22.463551.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_02_02T05_08_22.463551", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T05-08-22.463551.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T05-08-22.463551.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_02T05_08_22.463551", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T05-08-22.463551.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T05-08-22.463551.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_02T05_08_22.463551", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T05-08-22.463551.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T05-08-22.463551.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_02T05_08_22.463551", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T05-08-22.463551.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T05-08-22.463551.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_02T05_08_22.463551", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T05-08-22.463551.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T05-08-22.463551.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_02T05_08_22.463551", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T05-08-22.463551.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T05-08-22.463551.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_02T05_08_22.463551", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T05-08-22.463551.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T05-08-22.463551.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_02T05_08_22.463551", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T05-08-22.463551.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T05-08-22.463551.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_02T05_08_22.463551", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T05-08-22.463551.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T05-08-22.463551.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_02T05_08_22.463551", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T05-08-22.463551.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T05-08-22.463551.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_02T05_08_22.463551", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T05-08-22.463551.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T05-08-22.463551.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_02T05_08_22.463551", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T05-08-22.463551.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T05-08-22.463551.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_02T05_08_22.463551", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T05-08-22.463551.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T05-08-22.463551.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_02T05_08_22.463551", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T05-08-22.463551.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T05-08-22.463551.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_02T05_08_22.463551", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T05-08-22.463551.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T05-08-22.463551.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_02T05_08_22.463551", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T05-08-22.463551.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T05-08-22.463551.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_02T05_08_22.463551", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T05-08-22.463551.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T05-08-22.463551.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_02T05_08_22.463551", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T05-08-22.463551.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T05-08-22.463551.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_02T05_08_22.463551", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T05-08-22.463551.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T05-08-22.463551.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_02T05_08_22.463551", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T05-08-22.463551.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T05-08-22.463551.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_02T05_08_22.463551", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T05-08-22.463551.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T05-08-22.463551.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_02T05_08_22.463551", "path": ["**/details_harness|winogrande|5_2024-02-02T05-08-22.463551.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-02-02T05-08-22.463551.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_02_02T05_08_22.463551", "path": ["results_2024-02-02T05-08-22.463551.parquet"]}, {"split": "latest", "path": ["results_2024-02-02T05-08-22.463551.parquet"]}]}]}
2024-02-02T05:11:12+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of DreadPoor/Bageluccine-2-7B-slerp Dataset automatically created during the evaluation run of model DreadPoor/Bageluccine-2-7B-slerp on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-02-02T05:08:22.463551 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
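The flattened card above keeps the sentence "To load the details from a run, you can for instance do the following:" but the code block that followed it was dropped during flattening. Below is a minimal sketch of the intended call; the repo id is an assumption inferred from the `details_<org>__<model>` naming pattern used by the other evaluation repos in this dump, and `harness_winogrande_5` is the example config these generated cards use.

```python
from datasets import load_dataset

# Repo id assumed from the details_<org>__<model> pattern (not stated in this record's text);
# "harness_winogrande_5" is the example config used by these generated cards.
data = load_dataset(
    "open-llm-leaderboard/details_DreadPoor__Bageluccine-2-7B-slerp",
    "harness_winogrande_5",
    split="train",
)
```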
[ "# Dataset Card for Evaluation run of DreadPoor/Bageluccine-2-7B-slerp\n\n\n\nDataset automatically created during the evaluation run of model DreadPoor/Bageluccine-2-7B-slerp on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T05:08:22.463551(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of DreadPoor/Bageluccine-2-7B-slerp\n\n\n\nDataset automatically created during the evaluation run of model DreadPoor/Bageluccine-2-7B-slerp on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T05:08:22.463551(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
918e9da4b12b456fb0cc11b2cf3ea98b7c9d5a6a
# Dataset Card for Evaluation run of cloudyu/19B_TRUTH_DPO <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [cloudyu/19B_TRUTH_DPO](https://huggingface.co/cloudyu/19B_TRUTH_DPO) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_cloudyu__19B_TRUTH_DPO", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-02-02T05:24:23.880496](https://huggingface.co/datasets/open-llm-leaderboard/details_cloudyu__19B_TRUTH_DPO/blob/main/results_2024-02-02T05-24-23.880496.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6604281536017871, "acc_stderr": 0.03188067819207853, "acc_norm": 0.6626612481630715, "acc_norm_stderr": 0.03252011256731915, "mc1": 0.5214198286413708, "mc1_stderr": 0.01748743214471164, "mc2": 0.7223001888589835, "mc2_stderr": 0.014760061606764314 }, "harness|arc:challenge|25": { "acc": 0.6979522184300341, "acc_stderr": 0.013417519144716417, "acc_norm": 0.7167235494880546, "acc_norm_stderr": 0.013167478735134575 }, "harness|hellaswag|10": { "acc": 0.7092212706632145, "acc_stderr": 0.0045319353915070065, "acc_norm": 0.8862776339374626, "acc_norm_stderr": 0.003168249351889306 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.38, "acc_stderr": 0.048783173121456316, "acc_norm": 0.38, "acc_norm_stderr": 0.048783173121456316 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6370370370370371, "acc_stderr": 0.04153948404742398, "acc_norm": 0.6370370370370371, "acc_norm_stderr": 0.04153948404742398 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.7368421052631579, "acc_stderr": 0.03583496176361072, "acc_norm": 0.7368421052631579, "acc_norm_stderr": 0.03583496176361072 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.74, "acc_stderr": 0.0440844002276808, "acc_norm": 0.74, "acc_norm_stderr": 0.0440844002276808 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6792452830188679, "acc_stderr": 0.028727502957880267, "acc_norm": 0.6792452830188679, "acc_norm_stderr": 0.028727502957880267 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7708333333333334, "acc_stderr": 0.03514697467862388, "acc_norm": 0.7708333333333334, "acc_norm_stderr": 0.03514697467862388 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.46, "acc_stderr": 0.05009082659620333, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620333 },
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.653179190751445, "acc_stderr": 0.036291466701596636, "acc_norm": 0.653179190751445, "acc_norm_stderr": 0.036291466701596636 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4117647058823529, "acc_stderr": 0.04897104952726366, "acc_norm": 0.4117647058823529, "acc_norm_stderr": 0.04897104952726366 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.72, "acc_stderr": 0.04512608598542127, "acc_norm": 0.72, "acc_norm_stderr": 0.04512608598542127 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.6297872340425532, "acc_stderr": 0.03156564682236786, "acc_norm": 0.6297872340425532, "acc_norm_stderr": 0.03156564682236786 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.49122807017543857, "acc_stderr": 0.04702880432049615, "acc_norm": 0.49122807017543857, "acc_norm_stderr": 0.04702880432049615 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.6137931034482759, "acc_stderr": 0.04057324734419035, "acc_norm": 0.6137931034482759, "acc_norm_stderr": 0.04057324734419035 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.48677248677248675, "acc_stderr": 0.025742297289575142, "acc_norm": 0.48677248677248675, "acc_norm_stderr": 0.025742297289575142 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.4603174603174603, "acc_stderr": 0.04458029125470973, "acc_norm": 0.4603174603174603, "acc_norm_stderr": 0.04458029125470973 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.35, "acc_stderr": 0.047937248544110196, "acc_norm": 0.35, "acc_norm_stderr": 0.047937248544110196 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.8032258064516129, "acc_stderr": 0.022616409420742025, "acc_norm": 0.8032258064516129, "acc_norm_stderr": 0.022616409420742025 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5369458128078818, "acc_stderr": 0.035083705204426656, "acc_norm": 0.5369458128078818, "acc_norm_stderr": 0.035083705204426656 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.8, "acc_stderr": 0.031234752377721175, "acc_norm": 0.8, "acc_norm_stderr": 0.031234752377721175 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8484848484848485, "acc_stderr": 0.025545650426603627, "acc_norm": 0.8484848484848485, "acc_norm_stderr": 0.025545650426603627 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8911917098445595, "acc_stderr": 0.022473253332768766, "acc_norm": 0.8911917098445595, "acc_norm_stderr": 0.022473253332768766 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6717948717948717, "acc_stderr": 0.023807633198657266, "acc_norm": 0.6717948717948717, "acc_norm_stderr": 0.023807633198657266 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.34074074074074073, "acc_stderr": 0.028897748741131137, "acc_norm": 0.34074074074074073, "acc_norm_stderr": 0.028897748741131137 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.7016806722689075, "acc_stderr": 0.029719142876342853, "acc_norm": 0.7016806722689075, "acc_norm_stderr": 0.029719142876342853 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3841059602649007, "acc_stderr": 0.03971301814719197, "acc_norm": 
0.3841059602649007, "acc_norm_stderr": 0.03971301814719197 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8422018348623853, "acc_stderr": 0.01563002297009246, "acc_norm": 0.8422018348623853, "acc_norm_stderr": 0.01563002297009246 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5462962962962963, "acc_stderr": 0.033953227263757976, "acc_norm": 0.5462962962962963, "acc_norm_stderr": 0.033953227263757976 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8480392156862745, "acc_stderr": 0.025195658428931796, "acc_norm": 0.8480392156862745, "acc_norm_stderr": 0.025195658428931796 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8396624472573839, "acc_stderr": 0.02388438092596567, "acc_norm": 0.8396624472573839, "acc_norm_stderr": 0.02388438092596567 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6905829596412556, "acc_stderr": 0.03102441174057222, "acc_norm": 0.6905829596412556, "acc_norm_stderr": 0.03102441174057222 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7557251908396947, "acc_stderr": 0.037683359597287434, "acc_norm": 0.7557251908396947, "acc_norm_stderr": 0.037683359597287434 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8016528925619835, "acc_stderr": 0.036401182719909456, "acc_norm": 0.8016528925619835, "acc_norm_stderr": 0.036401182719909456 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7870370370370371, "acc_stderr": 0.0395783547198098, "acc_norm": 0.7870370370370371, "acc_norm_stderr": 0.0395783547198098 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7239263803680982, "acc_stderr": 0.035123852837050475, "acc_norm": 0.7239263803680982, "acc_norm_stderr": 0.035123852837050475 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.5089285714285714, "acc_stderr": 0.04745033255489122, "acc_norm": 0.5089285714285714, "acc_norm_stderr": 0.04745033255489122 }, "harness|hendrycksTest-management|5": { "acc": 0.8543689320388349, "acc_stderr": 0.034926064766237906, "acc_norm": 0.8543689320388349, "acc_norm_stderr": 0.034926064766237906 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8589743589743589, "acc_stderr": 0.022801382534597528, "acc_norm": 0.8589743589743589, "acc_norm_stderr": 0.022801382534597528 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.71, "acc_stderr": 0.045604802157206845, "acc_norm": 0.71, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7994891443167306, "acc_stderr": 0.014317653708594202, "acc_norm": 0.7994891443167306, "acc_norm_stderr": 0.014317653708594202 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7398843930635838, "acc_stderr": 0.023618678310069356, "acc_norm": 0.7398843930635838, "acc_norm_stderr": 0.023618678310069356 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4346368715083799, "acc_stderr": 0.01657899743549672, "acc_norm": 0.4346368715083799, "acc_norm_stderr": 0.01657899743549672 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.738562091503268, "acc_stderr": 0.025160998214292456, "acc_norm": 0.738562091503268, "acc_norm_stderr": 0.025160998214292456 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7106109324758842, "acc_stderr": 0.025755865922632945, "acc_norm": 0.7106109324758842, "acc_norm_stderr": 0.025755865922632945 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7962962962962963, "acc_stderr": 0.02240967454730418, "acc_norm": 0.7962962962962963, "acc_norm_stderr": 0.02240967454730418 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 
0.5106382978723404, "acc_stderr": 0.02982074719142244, "acc_norm": 0.5106382978723404, "acc_norm_stderr": 0.02982074719142244 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4876140808344198, "acc_stderr": 0.01276631731547356, "acc_norm": 0.4876140808344198, "acc_norm_stderr": 0.01276631731547356 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.7426470588235294, "acc_stderr": 0.026556519470041513, "acc_norm": 0.7426470588235294, "acc_norm_stderr": 0.026556519470041513 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6748366013071896, "acc_stderr": 0.018950886770806315, "acc_norm": 0.6748366013071896, "acc_norm_stderr": 0.018950886770806315 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6727272727272727, "acc_stderr": 0.0449429086625209, "acc_norm": 0.6727272727272727, "acc_norm_stderr": 0.0449429086625209 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7510204081632653, "acc_stderr": 0.027682979522960234, "acc_norm": 0.7510204081632653, "acc_norm_stderr": 0.027682979522960234 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8208955223880597, "acc_stderr": 0.027113286753111837, "acc_norm": 0.8208955223880597, "acc_norm_stderr": 0.027113286753111837 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.86, "acc_stderr": 0.03487350880197768, "acc_norm": 0.86, "acc_norm_stderr": 0.03487350880197768 }, "harness|hendrycksTest-virology|5": { "acc": 0.5783132530120482, "acc_stderr": 0.03844453181770917, "acc_norm": 0.5783132530120482, "acc_norm_stderr": 0.03844453181770917 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7894736842105263, "acc_stderr": 0.03126781714663179, "acc_norm": 0.7894736842105263, "acc_norm_stderr": 0.03126781714663179 }, "harness|truthfulqa:mc|0": { "mc1": 0.5214198286413708, "mc1_stderr": 0.01748743214471164, "mc2": 0.7223001888589835, "mc2_stderr": 0.014760061606764314 }, "harness|winogrande|5": { "acc": 0.8216258879242304, "acc_stderr": 0.010759352014855934 }, "harness|gsm8k|5": { "acc": 0.5633055344958302, "acc_stderr": 0.013661649780905491 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
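Beyond the single `load_dataset` call shown in the card, the sketch below illustrates the configuration/split layout described above. It is an illustration, not part of the generated card, and assumes a standard `datasets` installation; `harness_gsm8k_5` is one of the per-task configs listed in this repo's metadata.

```python
from datasets import get_dataset_config_names, load_dataset

repo = "open-llm-leaderboard/details_cloudyu__19B_TRUTH_DPO"

# One config per evaluated task, plus the aggregated "results" config.
configs = get_dataset_config_names(repo)
print(len(configs))

# Each config has one timestamped split per run and a "latest" alias
# pointing at the most recent run.
details = load_dataset(repo, "harness_gsm8k_5", split="latest")
print(details[0])
```

The same pattern works for any config in the list, e.g. `harness_truthfulqa_mc_0` or the per-subject `harness_hendrycksTest_*_5` configs.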
open-llm-leaderboard/details_cloudyu__19B_TRUTH_DPO
[ "region:us" ]
2024-02-02T05:26:39+00:00
{"pretty_name": "Evaluation run of cloudyu/19B_TRUTH_DPO", "dataset_summary": "Dataset automatically created during the evaluation run of model [cloudyu/19B_TRUTH_DPO](https://huggingface.co/cloudyu/19B_TRUTH_DPO) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_cloudyu__19B_TRUTH_DPO\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-02T05:24:23.880496](https://huggingface.co/datasets/open-llm-leaderboard/details_cloudyu__19B_TRUTH_DPO/blob/main/results_2024-02-02T05-24-23.880496.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6604281536017871,\n \"acc_stderr\": 0.03188067819207853,\n \"acc_norm\": 0.6626612481630715,\n \"acc_norm_stderr\": 0.03252011256731915,\n \"mc1\": 0.5214198286413708,\n \"mc1_stderr\": 0.01748743214471164,\n \"mc2\": 0.7223001888589835,\n \"mc2_stderr\": 0.014760061606764314\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6979522184300341,\n \"acc_stderr\": 0.013417519144716417,\n \"acc_norm\": 0.7167235494880546,\n \"acc_norm_stderr\": 0.013167478735134575\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.7092212706632145,\n \"acc_stderr\": 0.0045319353915070065,\n \"acc_norm\": 0.8862776339374626,\n \"acc_norm_stderr\": 0.003168249351889306\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.38,\n \"acc_stderr\": 0.048783173121456316,\n \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.048783173121456316\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6370370370370371,\n \"acc_stderr\": 0.04153948404742398,\n \"acc_norm\": 0.6370370370370371,\n \"acc_norm_stderr\": 0.04153948404742398\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.7368421052631579,\n \"acc_stderr\": 0.03583496176361072,\n \"acc_norm\": 0.7368421052631579,\n \"acc_norm_stderr\": 0.03583496176361072\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.74,\n \"acc_stderr\": 0.0440844002276808,\n \"acc_norm\": 0.74,\n \"acc_norm_stderr\": 0.0440844002276808\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6792452830188679,\n \"acc_stderr\": 0.028727502957880267,\n \"acc_norm\": 0.6792452830188679,\n \"acc_norm_stderr\": 0.028727502957880267\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7708333333333334,\n \"acc_stderr\": 0.03514697467862388,\n \"acc_norm\": 0.7708333333333334,\n \"acc_norm_stderr\": 0.03514697467862388\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.42,\n \"acc_stderr\": 0.049604496374885836,\n 
\"acc_norm\": 0.42,\n \"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.46,\n \"acc_stderr\": 0.05009082659620333,\n \"acc_norm\": 0.46,\n \"acc_norm_stderr\": 0.05009082659620333\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.653179190751445,\n \"acc_stderr\": 0.036291466701596636,\n \"acc_norm\": 0.653179190751445,\n \"acc_norm_stderr\": 0.036291466701596636\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4117647058823529,\n \"acc_stderr\": 0.04897104952726366,\n \"acc_norm\": 0.4117647058823529,\n \"acc_norm_stderr\": 0.04897104952726366\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.04512608598542127,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.04512608598542127\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.6297872340425532,\n \"acc_stderr\": 0.03156564682236786,\n \"acc_norm\": 0.6297872340425532,\n \"acc_norm_stderr\": 0.03156564682236786\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.49122807017543857,\n \"acc_stderr\": 0.04702880432049615,\n \"acc_norm\": 0.49122807017543857,\n \"acc_norm_stderr\": 0.04702880432049615\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.6137931034482759,\n \"acc_stderr\": 0.04057324734419035,\n \"acc_norm\": 0.6137931034482759,\n \"acc_norm_stderr\": 0.04057324734419035\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.48677248677248675,\n \"acc_stderr\": 0.025742297289575142,\n \"acc_norm\": 0.48677248677248675,\n \"acc_norm_stderr\": 0.025742297289575142\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4603174603174603,\n \"acc_stderr\": 0.04458029125470973,\n \"acc_norm\": 0.4603174603174603,\n \"acc_norm_stderr\": 0.04458029125470973\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.047937248544110196,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.047937248544110196\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.8032258064516129,\n \"acc_stderr\": 0.022616409420742025,\n \"acc_norm\": 0.8032258064516129,\n \"acc_norm_stderr\": 0.022616409420742025\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5369458128078818,\n \"acc_stderr\": 0.035083705204426656,\n \"acc_norm\": 0.5369458128078818,\n \"acc_norm_stderr\": 0.035083705204426656\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.8,\n \"acc_stderr\": 0.031234752377721175,\n \"acc_norm\": 0.8,\n \"acc_norm_stderr\": 0.031234752377721175\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8484848484848485,\n \"acc_stderr\": 0.025545650426603627,\n \"acc_norm\": 0.8484848484848485,\n \"acc_norm_stderr\": 0.025545650426603627\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8911917098445595,\n \"acc_stderr\": 0.022473253332768766,\n \"acc_norm\": 0.8911917098445595,\n \"acc_norm_stderr\": 0.022473253332768766\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6717948717948717,\n 
\"acc_stderr\": 0.023807633198657266,\n \"acc_norm\": 0.6717948717948717,\n \"acc_norm_stderr\": 0.023807633198657266\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.34074074074074073,\n \"acc_stderr\": 0.028897748741131137,\n \"acc_norm\": 0.34074074074074073,\n \"acc_norm_stderr\": 0.028897748741131137\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.7016806722689075,\n \"acc_stderr\": 0.029719142876342853,\n \"acc_norm\": 0.7016806722689075,\n \"acc_norm_stderr\": 0.029719142876342853\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3841059602649007,\n \"acc_stderr\": 0.03971301814719197,\n \"acc_norm\": 0.3841059602649007,\n \"acc_norm_stderr\": 0.03971301814719197\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8422018348623853,\n \"acc_stderr\": 0.01563002297009246,\n \"acc_norm\": 0.8422018348623853,\n \"acc_norm_stderr\": 0.01563002297009246\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5462962962962963,\n \"acc_stderr\": 0.033953227263757976,\n \"acc_norm\": 0.5462962962962963,\n \"acc_norm_stderr\": 0.033953227263757976\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8480392156862745,\n \"acc_stderr\": 0.025195658428931796,\n \"acc_norm\": 0.8480392156862745,\n \"acc_norm_stderr\": 0.025195658428931796\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8396624472573839,\n \"acc_stderr\": 0.02388438092596567,\n \"acc_norm\": 0.8396624472573839,\n \"acc_norm_stderr\": 0.02388438092596567\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6905829596412556,\n \"acc_stderr\": 0.03102441174057222,\n \"acc_norm\": 0.6905829596412556,\n \"acc_norm_stderr\": 0.03102441174057222\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7557251908396947,\n \"acc_stderr\": 0.037683359597287434,\n \"acc_norm\": 0.7557251908396947,\n \"acc_norm_stderr\": 0.037683359597287434\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8016528925619835,\n \"acc_stderr\": 0.036401182719909456,\n \"acc_norm\": 0.8016528925619835,\n \"acc_norm_stderr\": 0.036401182719909456\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7870370370370371,\n \"acc_stderr\": 0.0395783547198098,\n \"acc_norm\": 0.7870370370370371,\n \"acc_norm_stderr\": 0.0395783547198098\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7239263803680982,\n \"acc_stderr\": 0.035123852837050475,\n \"acc_norm\": 0.7239263803680982,\n \"acc_norm_stderr\": 0.035123852837050475\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.5089285714285714,\n \"acc_stderr\": 0.04745033255489122,\n \"acc_norm\": 0.5089285714285714,\n \"acc_norm_stderr\": 0.04745033255489122\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8543689320388349,\n \"acc_stderr\": 0.034926064766237906,\n \"acc_norm\": 0.8543689320388349,\n \"acc_norm_stderr\": 0.034926064766237906\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8589743589743589,\n \"acc_stderr\": 0.022801382534597528,\n \"acc_norm\": 0.8589743589743589,\n \"acc_norm_stderr\": 0.022801382534597528\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7994891443167306,\n \"acc_stderr\": 0.014317653708594202,\n \"acc_norm\": 
0.7994891443167306,\n \"acc_norm_stderr\": 0.014317653708594202\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7398843930635838,\n \"acc_stderr\": 0.023618678310069356,\n \"acc_norm\": 0.7398843930635838,\n \"acc_norm_stderr\": 0.023618678310069356\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4346368715083799,\n \"acc_stderr\": 0.01657899743549672,\n \"acc_norm\": 0.4346368715083799,\n \"acc_norm_stderr\": 0.01657899743549672\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.738562091503268,\n \"acc_stderr\": 0.025160998214292456,\n \"acc_norm\": 0.738562091503268,\n \"acc_norm_stderr\": 0.025160998214292456\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7106109324758842,\n \"acc_stderr\": 0.025755865922632945,\n \"acc_norm\": 0.7106109324758842,\n \"acc_norm_stderr\": 0.025755865922632945\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7962962962962963,\n \"acc_stderr\": 0.02240967454730418,\n \"acc_norm\": 0.7962962962962963,\n \"acc_norm_stderr\": 0.02240967454730418\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.5106382978723404,\n \"acc_stderr\": 0.02982074719142244,\n \"acc_norm\": 0.5106382978723404,\n \"acc_norm_stderr\": 0.02982074719142244\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4876140808344198,\n \"acc_stderr\": 0.01276631731547356,\n \"acc_norm\": 0.4876140808344198,\n \"acc_norm_stderr\": 0.01276631731547356\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.7426470588235294,\n \"acc_stderr\": 0.026556519470041513,\n \"acc_norm\": 0.7426470588235294,\n \"acc_norm_stderr\": 0.026556519470041513\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6748366013071896,\n \"acc_stderr\": 0.018950886770806315,\n \"acc_norm\": 0.6748366013071896,\n \"acc_norm_stderr\": 0.018950886770806315\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6727272727272727,\n \"acc_stderr\": 0.0449429086625209,\n \"acc_norm\": 0.6727272727272727,\n \"acc_norm_stderr\": 0.0449429086625209\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7510204081632653,\n \"acc_stderr\": 0.027682979522960234,\n \"acc_norm\": 0.7510204081632653,\n \"acc_norm_stderr\": 0.027682979522960234\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8208955223880597,\n \"acc_stderr\": 0.027113286753111837,\n \"acc_norm\": 0.8208955223880597,\n \"acc_norm_stderr\": 0.027113286753111837\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.86,\n \"acc_stderr\": 0.03487350880197768,\n \"acc_norm\": 0.86,\n \"acc_norm_stderr\": 0.03487350880197768\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5783132530120482,\n \"acc_stderr\": 0.03844453181770917,\n \"acc_norm\": 0.5783132530120482,\n \"acc_norm_stderr\": 0.03844453181770917\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.7894736842105263,\n \"acc_stderr\": 0.03126781714663179,\n \"acc_norm\": 0.7894736842105263,\n \"acc_norm_stderr\": 0.03126781714663179\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5214198286413708,\n \"mc1_stderr\": 0.01748743214471164,\n \"mc2\": 0.7223001888589835,\n \"mc2_stderr\": 0.014760061606764314\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8216258879242304,\n \"acc_stderr\": 0.010759352014855934\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.5633055344958302,\n \"acc_stderr\": 0.013661649780905491\n }\n}\n```", "repo_url": "https://huggingface.co/cloudyu/19B_TRUTH_DPO", 
"leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_02T05_24_23.880496", "path": ["**/details_harness|arc:challenge|25_2024-02-02T05-24-23.880496.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-02T05-24-23.880496.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_02T05_24_23.880496", "path": ["**/details_harness|gsm8k|5_2024-02-02T05-24-23.880496.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-02T05-24-23.880496.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_02T05_24_23.880496", "path": ["**/details_harness|hellaswag|10_2024-02-02T05-24-23.880496.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-02T05-24-23.880496.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_02T05_24_23.880496", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T05-24-23.880496.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T05-24-23.880496.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T05-24-23.880496.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-02T05-24-23.880496.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T05-24-23.880496.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T05-24-23.880496.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_02T05_24_23.880496", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T05-24-23.880496.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T05-24-23.880496.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_02T05_24_23.880496", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T05-24-23.880496.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T05-24-23.880496.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_02T05_24_23.880496", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T05-24-23.880496.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T05-24-23.880496.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_02T05_24_23.880496", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T05-24-23.880496.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T05-24-23.880496.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_02T05_24_23.880496", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T05-24-23.880496.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T05-24-23.880496.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_02T05_24_23.880496", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T05-24-23.880496.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T05-24-23.880496.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_02T05_24_23.880496", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T05-24-23.880496.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T05-24-23.880496.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_02_02T05_24_23.880496", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T05-24-23.880496.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T05-24-23.880496.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_02T05_24_23.880496", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T05-24-23.880496.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T05-24-23.880496.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_02T05_24_23.880496", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T05-24-23.880496.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T05-24-23.880496.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_02T05_24_23.880496", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T05-24-23.880496.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T05-24-23.880496.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_02T05_24_23.880496", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T05-24-23.880496.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T05-24-23.880496.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_02T05_24_23.880496", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T05-24-23.880496.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T05-24-23.880496.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_02T05_24_23.880496", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T05-24-23.880496.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T05-24-23.880496.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_02T05_24_23.880496", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T05-24-23.880496.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T05-24-23.880496.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_02T05_24_23.880496", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T05-24-23.880496.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T05-24-23.880496.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_02T05_24_23.880496", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T05-24-23.880496.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T05-24-23.880496.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_02T05_24_23.880496", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T05-24-23.880496.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T05-24-23.880496.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_02T05_24_23.880496", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T05-24-23.880496.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T05-24-23.880496.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_02T05_24_23.880496", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T05-24-23.880496.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T05-24-23.880496.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_02T05_24_23.880496", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T05-24-23.880496.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T05-24-23.880496.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_02T05_24_23.880496", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T05-24-23.880496.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T05-24-23.880496.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_02T05_24_23.880496", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T05-24-23.880496.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T05-24-23.880496.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_02T05_24_23.880496", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T05-24-23.880496.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T05-24-23.880496.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_02T05_24_23.880496", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T05-24-23.880496.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T05-24-23.880496.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_02T05_24_23.880496", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T05-24-23.880496.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T05-24-23.880496.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_02T05_24_23.880496", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T05-24-23.880496.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T05-24-23.880496.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_02T05_24_23.880496", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T05-24-23.880496.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T05-24-23.880496.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_02T05_24_23.880496", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T05-24-23.880496.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T05-24-23.880496.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_02T05_24_23.880496", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T05-24-23.880496.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T05-24-23.880496.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_02T05_24_23.880496", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T05-24-23.880496.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T05-24-23.880496.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_02T05_24_23.880496", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T05-24-23.880496.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T05-24-23.880496.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_02T05_24_23.880496", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T05-24-23.880496.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T05-24-23.880496.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_02T05_24_23.880496", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T05-24-23.880496.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T05-24-23.880496.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_02T05_24_23.880496", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T05-24-23.880496.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T05-24-23.880496.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_02T05_24_23.880496", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T05-24-23.880496.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T05-24-23.880496.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_02T05_24_23.880496", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T05-24-23.880496.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T05-24-23.880496.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_02_02T05_24_23.880496", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T05-24-23.880496.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T05-24-23.880496.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_02T05_24_23.880496", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T05-24-23.880496.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T05-24-23.880496.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_02T05_24_23.880496", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T05-24-23.880496.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T05-24-23.880496.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_02T05_24_23.880496", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T05-24-23.880496.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T05-24-23.880496.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_02T05_24_23.880496", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T05-24-23.880496.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T05-24-23.880496.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_02T05_24_23.880496", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T05-24-23.880496.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T05-24-23.880496.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_02T05_24_23.880496", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T05-24-23.880496.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T05-24-23.880496.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_02T05_24_23.880496", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T05-24-23.880496.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T05-24-23.880496.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_02T05_24_23.880496", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T05-24-23.880496.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T05-24-23.880496.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_02T05_24_23.880496", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T05-24-23.880496.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T05-24-23.880496.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_02T05_24_23.880496", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T05-24-23.880496.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T05-24-23.880496.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_02T05_24_23.880496", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T05-24-23.880496.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T05-24-23.880496.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_02T05_24_23.880496", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T05-24-23.880496.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T05-24-23.880496.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_02T05_24_23.880496", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T05-24-23.880496.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T05-24-23.880496.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_02T05_24_23.880496", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T05-24-23.880496.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T05-24-23.880496.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_02T05_24_23.880496", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T05-24-23.880496.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T05-24-23.880496.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_02T05_24_23.880496", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T05-24-23.880496.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T05-24-23.880496.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_02T05_24_23.880496", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T05-24-23.880496.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T05-24-23.880496.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_02T05_24_23.880496", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T05-24-23.880496.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T05-24-23.880496.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_02T05_24_23.880496", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T05-24-23.880496.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T05-24-23.880496.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_02T05_24_23.880496", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T05-24-23.880496.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T05-24-23.880496.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_02T05_24_23.880496", "path": ["**/details_harness|winogrande|5_2024-02-02T05-24-23.880496.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-02-02T05-24-23.880496.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_02_02T05_24_23.880496", "path": ["results_2024-02-02T05-24-23.880496.parquet"]}, {"split": "latest", "path": 
["results_2024-02-02T05-24-23.880496.parquet"]}]}]}
2024-02-02T05:27:03+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of cloudyu/19B_TRUTH_DPO Dataset automatically created during the evaluation run of model cloudyu/19B_TRUTH_DPO on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following (see the loading sketch just after this entry): ## Latest results These are the latest results from run 2024-02-02T05:24:23.880496 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of cloudyu/19B_TRUTH_DPO\n\n\n\nDataset automatically created during the evaluation run of model cloudyu/19B_TRUTH_DPO on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T05:24:23.880496(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of cloudyu/19B_TRUTH_DPO\n\n\n\nDataset automatically created during the evaluation run of model cloudyu/19B_TRUTH_DPO on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T05:24:23.880496(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
0ffec5264ccabcee555084d15a523d00034ffd14
CS 482 Assignment-1B February 2, 2024
rlopez3341/assignment-1b
[ "language:en", "code", "region:us" ]
2024-02-02T05:34:47+00:00
{"language": ["en"], "tags": ["code"]}
2024-02-02T05:44:36+00:00
[]
[ "en" ]
TAGS #language-English #code #region-us
CS 482 Assignment-1B February 2, 2024
[]
[ "TAGS\n#language-English #code #region-us \n" ]
db967dcf45bdbd1e363c94359850e4f9bafd2b3c
# Dataset Card for argilla-dataset

This dataset has been created with [Argilla](https://docs.argilla.io). As shown in the sections below, this dataset can be loaded into Argilla as explained in [Load with Argilla](#load-with-argilla), or used directly with the `datasets` library in [Load with `datasets`](#load-with-datasets).

## Dataset Description

- **Homepage:** https://argilla.io
- **Repository:** https://github.com/argilla-io/argilla
- **Paper:**
- **Leaderboard:**
- **Point of Contact:**

### Dataset Summary

This dataset contains:

* A dataset configuration file conforming to the Argilla dataset format named `argilla.yaml`. This configuration file will be used to configure the dataset when using the `FeedbackDataset.from_huggingface` method in Argilla.
* Dataset records in a format compatible with HuggingFace `datasets`. These records will be loaded automatically when using `FeedbackDataset.from_huggingface` and can be loaded independently using the `datasets` library via `load_dataset`.
* The [annotation guidelines](#annotation-guidelines) that have been used for building and curating the dataset, if they've been defined in Argilla.

### Load with Argilla

To load with Argilla, you'll just need to install Argilla as `pip install argilla --upgrade` and then use the following code:

```python
import argilla as rg

ds = rg.FeedbackDataset.from_huggingface("arkamaldeen/argilla-dataset")
```

### Load with `datasets`

To load this dataset with `datasets`, you'll just need to install `datasets` as `pip install datasets --upgrade` and then use the following code:

```python
from datasets import load_dataset

ds = load_dataset("arkamaldeen/argilla-dataset")
```

### Supported Tasks and Leaderboards

This dataset can contain [multiple fields, questions and responses](https://docs.argilla.io/en/latest/conceptual_guides/data_model.html#feedback-dataset) so it can be used for different NLP tasks, depending on the configuration. The dataset structure is described in the [Dataset Structure section](#dataset-structure).

There are no leaderboards associated with this dataset.

### Languages

[More Information Needed]

## Dataset Structure

### Data in Argilla

The dataset is created in Argilla with: **fields**, **questions**, **suggestions**, **metadata**, **vectors**, and **guidelines**.

The **fields** are the dataset records themselves; for the moment just text fields are supported. These are the ones that will be used to provide responses to the questions.

| Field Name | Title | Type | Required | Markdown |
| ---------- | ----- | ---- | -------- | -------- |
| text | Text | text | True | False |

The **questions** are the questions that will be asked to the annotators. They can be of different types, such as rating, text, label_selection, multi_label_selection, or ranking.

| Question Name | Title | Type | Required | Description | Values/Labels |
| ------------- | ----- | ---- | -------- | ----------- | ------------- |
| validate | Validate | label_selection | True | N/A | ['select', 'unselect'] |

The **suggestions** are human- or machine-generated recommendations for each question to assist the annotator during the annotation process, so those are always linked to the existing questions, and named by appending "-suggestion" and "-suggestion-metadata" to those, containing the value(s) of the suggestion and its metadata, respectively. As such, the possible values are the same as in the table above, but the column name is appended with "-suggestion" and the metadata is appended with "-suggestion-metadata".

The **metadata** is a dictionary that can be used to provide additional information about the dataset record. This can be useful to provide additional context to the annotators, or to provide additional information about the dataset record itself. For example, you can use this to provide a link to the original source of the dataset record, or to provide additional information about the dataset record itself, such as the author, the date, or the source. The metadata is always optional, and can be potentially linked to the `metadata_properties` defined in the dataset configuration file in `argilla.yaml`.

| Metadata Name | Title | Type | Values | Visible for Annotators |
| ------------- | ----- | ---- | ------ | ---------------------- |

The **guidelines** are optional as well, and are just a plain string that can be used to provide instructions to the annotators. Find those in the [annotation guidelines](#annotation-guidelines) section.

### Data Instances

An example of a dataset instance in Argilla looks as follows:

```json
{
    "external_id": null,
    "fields": {
        "text": "\u0b13\u0b21\u0b3f\u0b36\u0b3e\u0b30 \u0b2a\u0b3e\u0b30\u0b2e\u0b4d\u0b2a\u0b3e\u0b30\u0b3f\u0b15 \u0b36\u0b2c\u0b26\u0b3e\u0b39 \u0b30\u0b40\u0b24\u0b3f\u0b28\u0b40\u0b24\u0b3f \u0b0f\u0b2c\u0b02 \u0b05\u0b28\u0b4d\u0b24\u0b3f\u0b2e\u0b38\u0b02\u0b38\u0b4d\u0b15\u0b3e\u0b30"
    },
    "metadata": {},
    "responses": [
        {
            "status": "submitted",
            "user_id": "0cd57185-cddd-408b-b469-2f0198f7c5e1",
            "values": {
                "validate": {
                    "value": "unselect"
                }
            }
        }
    ],
    "suggestions": [],
    "vectors": {}
}
```

While the same record in HuggingFace `datasets` looks as follows:

```json
{
    "external_id": null,
    "metadata": "{}",
    "text": "\u0b13\u0b21\u0b3f\u0b36\u0b3e\u0b30 \u0b2a\u0b3e\u0b30\u0b2e\u0b4d\u0b2a\u0b3e\u0b30\u0b3f\u0b15 \u0b36\u0b2c\u0b26\u0b3e\u0b39 \u0b30\u0b40\u0b24\u0b3f\u0b28\u0b40\u0b24\u0b3f \u0b0f\u0b2c\u0b02 \u0b05\u0b28\u0b4d\u0b24\u0b3f\u0b2e\u0b38\u0b02\u0b38\u0b4d\u0b15\u0b3e\u0b30",
    "validate": [
        {
            "status": "submitted",
            "user_id": "0cd57185-cddd-408b-b469-2f0198f7c5e1",
            "value": "unselect"
        }
    ],
    "validate-suggestion": null,
    "validate-suggestion-metadata": {
        "agent": null,
        "score": null,
        "type": null
    }
}
```

### Data Fields

Among the dataset fields, we differentiate between the following:

* **Fields:** These are the dataset records themselves; for the moment just text fields are supported. These are the ones that will be used to provide responses to the questions.
    * **text** is of type `text`.
* **Questions:** These are the questions that will be asked to the annotators. They can be of different types, such as `RatingQuestion`, `TextQuestion`, `LabelQuestion`, `MultiLabelQuestion`, and `RankingQuestion`.
    * **validate** is of type `label_selection` with the following allowed values ['select', 'unselect'].
* **Suggestions:** As of Argilla 1.13.0, the suggestions have been included to provide the annotators with suggestions to ease or assist during the annotation process. Suggestions are linked to the existing questions, are always optional, and contain not just the suggestion itself, but also the metadata linked to it, if applicable.
    * (optional) **validate-suggestion** is of type `label_selection` with the following allowed values ['select', 'unselect'].

Additionally, we also have two more fields that are optional and are the following:

* **metadata:** This is an optional field that can be used to provide additional information about the dataset record. This can be useful to provide additional context to the annotators, or to provide additional information about the dataset record itself. For example, you can use this to provide a link to the original source of the dataset record, or to provide additional information about the dataset record itself, such as the author, the date, or the source. The metadata is always optional, and can be potentially linked to the `metadata_properties` defined in the dataset configuration file in `argilla.yaml`.
* **external_id:** This is an optional field that can be used to provide an external ID for the dataset record. This can be useful if you want to link the dataset record to an external resource, such as a database or a file.

### Data Splits

The dataset contains a single split, which is `train`.

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation guidelines

[More Information Needed]

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
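The field and question tables in the card's "Data in Argilla" section pin down the schema completely: one required `text` field and one required `validate` label-selection question. A rough sketch of declaring that schema with the Argilla 1.x `FeedbackDataset` API (names and labels are taken from the tables; the construction itself is an assumption about typical usage, not code taken from this dataset):

```python
import argilla as rg

# Sketch of the schema described in the tables above; both the field and
# the question are required by default in Argilla 1.x.
schema = rg.FeedbackDataset(
    fields=[rg.TextField(name="text", title="Text")],
    questions=[
        rg.LabelQuestion(
            name="validate",
            title="Validate",
            labels=["select", "unselect"],
        )
    ],
)
```

A dataset declared this way can be filled with records and published back with `FeedbackDataset.push_to_huggingface`, the counterpart of the `from_huggingface` loader shown in the card.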
arkamaldeen/argilla-dataset
[ "size_categories:n<1K", "rlfh", "argilla", "human-feedback", "region:us" ]
2024-02-02T05:59:24+00:00
{"size_categories": "n<1K", "tags": ["rlfh", "argilla", "human-feedback"]}
2024-02-02T09:33:07+00:00
[]
[]
TAGS #size_categories-n<1K #rlfh #argilla #human-feedback #region-us
Dataset Card for argilla-dataset ================================ This dataset has been created with Argilla. As shown in the sections below, this dataset can be loaded into Argilla as explained in Load with Argilla, or used directly with the 'datasets' library in Load with 'datasets'. Dataset Description ------------------- * Homepage: URL * Repository: URL * Paper: * Leaderboard: * Point of Contact: ### Dataset Summary This dataset contains: * A dataset configuration file conforming to the Argilla dataset format named 'URL'. This configuration file will be used to configure the dataset when using the 'FeedbackDataset.from\_huggingface' method in Argilla. * Dataset records in a format compatible with HuggingFace 'datasets'. These records will be loaded automatically when using 'FeedbackDataset.from\_huggingface' and can be loaded independently using the 'datasets' library via 'load\_dataset'. * The annotation guidelines that have been used for building and curating the dataset, if they've been defined in Argilla. ### Load with Argilla To load with Argilla, you'll just need to install Argilla as 'pip install argilla --upgrade' and then use the following code: ### Load with 'datasets' To load this dataset with 'datasets', you'll just need to install 'datasets' as 'pip install datasets --upgrade' and then use the following code: ### Supported Tasks and Leaderboards This dataset can contain multiple fields, questions and responses so it can be used for different NLP tasks, depending on the configuration. The dataset structure is described in the Dataset Structure section. There are no leaderboards associated with this dataset. ### Languages Dataset Structure ----------------- ### Data in Argilla The dataset is created in Argilla with: fields, questions, suggestions, metadata, vectors, and guidelines. The fields are the dataset records themselves; for the moment just text fields are supported. These are the ones that will be used to provide responses to the questions. The questions are the questions that will be asked to the annotators. They can be of different types, such as rating, text, label\_selection, multi\_label\_selection, or ranking. The suggestions are human- or machine-generated recommendations for each question to assist the annotator during the annotation process, so those are always linked to the existing questions, and named by appending "-suggestion" and "-suggestion-metadata" to those, containing the value(s) of the suggestion and its metadata, respectively. As such, the possible values are the same as in the table above, but the column name is appended with "-suggestion" and the metadata is appended with "-suggestion-metadata". The metadata is a dictionary that can be used to provide additional information about the dataset record. This can be useful to provide additional context to the annotators, or to provide additional information about the dataset record itself. For example, you can use this to provide a link to the original source of the dataset record, or to provide additional information about the dataset record itself, such as the author, the date, or the source. The metadata is always optional, and can be potentially linked to the 'metadata\_properties' defined in the dataset configuration file in 'URL'. The guidelines are optional as well, and are just a plain string that can be used to provide instructions to the annotators. Find those in the annotation guidelines section. 
### Data Instances An example of a dataset instance in Argilla looks as follows: While the same record in HuggingFace 'datasets' looks as follows: ### Data Fields Among the dataset fields, we differentiate between the following: * Fields: These are the dataset records themselves, for the moment just text fields are supported. These are the ones that will be used to provide responses to the questions. + text is of type 'text'. * Questions: These are the questions that will be asked to the annotators. They can be of different types, such as 'RatingQuestion', 'TextQuestion', 'LabelQuestion', 'MultiLabelQuestion', and 'RankingQuestion'. + validate is of type 'label\_selection' with the following allowed values ['select', 'unselect']. * Suggestions: As of Argilla 1.13.0, the suggestions have been included to provide the annotators with suggestions to ease or assist during the annotation process. Suggestions are linked to the existing questions, are always optional, and contain not just the suggestion itself, but also the metadata linked to it, if applicable. + (optional) validate-suggestion is of type 'label\_selection' with the following allowed values ['select', 'unselect']. Additionally, we also have two more fields that are optional and are the following: * metadata: This is an optional field that can be used to provide additional information about the dataset record. This can be useful to provide additional context to the annotators, or to provide additional information about the dataset record itself. For example, you can use this to provide a link to the original source of the dataset record, or to provide additional information about the dataset record itself, such as the author, the date, or the source. The metadata is always optional, and can be potentially linked to the 'metadata\_properties' defined in the dataset configuration file in 'URL'. * external\_id: This is an optional field that can be used to provide an external ID for the dataset record. This can be useful if you want to link the dataset record to an external resource, such as a database or a file. ### Data Splits The dataset contains a single split, which is 'train'. Dataset Creation ---------------- ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation guidelines #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information Considerations for Using the Data --------------------------------- ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations Additional Information ---------------------- ### Dataset Curators ### Licensing Information ### Contributions
[ "### Dataset Summary\n\n\nThis dataset contains:\n\n\n* A dataset configuration file conforming to the Argilla dataset format named 'URL'. This configuration file will be used to configure the dataset when using the 'FeedbackDataset.from\\_huggingface' method in Argilla.\n* Dataset records in a format compatible with HuggingFace 'datasets'. These records will be loaded automatically when using 'FeedbackDataset.from\\_huggingface' and can be loaded independently using the 'datasets' library via 'load\\_dataset'.\n* The annotation guidelines that have been used for building and curating the dataset, if they've been defined in Argilla.", "### Load with Argilla\n\n\nTo load with Argilla, you'll just need to install Argilla as 'pip install argilla --upgrade' and then use the following code:", "### Load with 'datasets'\n\n\nTo load this dataset with 'datasets', you'll just need to install 'datasets' as 'pip install datasets --upgrade' and then use the following code:", "### Supported Tasks and Leaderboards\n\n\nThis dataset can contain multiple fields, questions and responses so it can be used for different NLP tasks, depending on the configuration. The dataset structure is described in the Dataset Structure section.\n\n\nThere are no leaderboards associated with this dataset.", "### Languages\n\n\nDataset Structure\n-----------------", "### Data in Argilla\n\n\nThe dataset is created in Argilla with: fields, questions, suggestions, metadata, vectors, and guidelines.\n\n\nThe fields are the dataset records themselves, for the moment just text fields are supported. These are the ones that will be used to provide responses to the questions.\n\n\n\nThe questions are the questions that will be asked to the annotators. They can be of different types, such as rating, text, label\\_selection, multi\\_label\\_selection, or ranking.\n\n\n\nThe suggestions are human or machine generated recommendations for each question to assist the annotator during the annotation process, so those are always linked to the existing questions, and named appending \"-suggestion\" and \"-suggestion-metadata\" to those, containing the value/s of the suggestion and its metadata, respectively. So on, the possible values are the same as in the table above, but the column name is appended with \"-suggestion\" and the metadata is appended with \"-suggestion-metadata\".\n\n\nThe metadata is a dictionary that can be used to provide additional information about the dataset record. This can be useful to provide additional context to the annotators, or to provide additional information about the dataset record itself. For example, you can use this to provide a link to the original source of the dataset record, or to provide additional information about the dataset record itself, such as the author, the date, or the source. The metadata is always optional, and can be potentially linked to the 'metadata\\_properties' defined in the dataset configuration file in 'URL'.\n\n\n\nThe guidelines, are optional as well, and are just a plain string that can be used to provide instructions to the annotators. Find those in the annotation guidelines section.", "### Data Instances\n\n\nAn example of a dataset instance in Argilla looks as follows:\n\n\nWhile the same record in HuggingFace 'datasets' looks as follows:", "### Data Fields\n\n\nAmong the dataset fields, we differentiate between the following:\n\n\n* Fields: These are the dataset records themselves, for the moment just text fields are supported. 
These are the ones that will be used to provide responses to the questions.\n\n\n\t+ text is of type 'text'.\n* Questions: These are the questions that will be asked to the annotators. They can be of different types, such as 'RatingQuestion', 'TextQuestion', 'LabelQuestion', 'MultiLabelQuestion', and 'RankingQuestion'.\n\n\n\t+ validate is of type 'label\\_selection' with the following allowed values ['select', 'unselect'].\n* Suggestions: As of Argilla 1.13.0, the suggestions have been included to provide the annotators with suggestions to ease or assist during the annotation process. Suggestions are linked to the existing questions, are always optional, and contain not just the suggestion itself, but also the metadata linked to it, if applicable.\n\n\n\t+ (optional) validate-suggestion is of type 'label\\_selection' with the following allowed values ['select', 'unselect'].\n\n\nAdditionally, we also have two more fields that are optional and are the following:\n\n\n* metadata: This is an optional field that can be used to provide additional information about the dataset record. This can be useful to provide additional context to the annotators, or to provide additional information about the dataset record itself. For example, you can use this to provide a link to the original source of the dataset record, or to provide additional information about the dataset record itself, such as the author, the date, or the source. The metadata is always optional, and can be potentially linked to the 'metadata\\_properties' defined in the dataset configuration file in 'URL'.\n* external\\_id: This is an optional field that can be used to provide an external ID for the dataset record. This can be useful if you want to link the dataset record to an external resource, such as a database or a file.", "### Data Splits\n\n\nThe dataset contains a single split, which is 'train'.\n\n\nDataset Creation\n----------------", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation guidelines", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information\n\n\nConsiderations for Using the Data\n---------------------------------", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations\n\n\nAdditional Information\n----------------------", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#size_categories-n<1K #rlfh #argilla #human-feedback #region-us \n", "### Dataset Summary\n\n\nThis dataset contains:\n\n\n* A dataset configuration file conforming to the Argilla dataset format named 'URL'. This configuration file will be used to configure the dataset when using the 'FeedbackDataset.from\\_huggingface' method in Argilla.\n* Dataset records in a format compatible with HuggingFace 'datasets'. These records will be loaded automatically when using 'FeedbackDataset.from\\_huggingface' and can be loaded independently using the 'datasets' library via 'load\\_dataset'.\n* The annotation guidelines that have been used for building and curating the dataset, if they've been defined in Argilla.", "### Load with Argilla\n\n\nTo load with Argilla, you'll just need to install Argilla as 'pip install argilla --upgrade' and then use the following code:", "### Load with 'datasets'\n\n\nTo load this dataset with 'datasets', you'll just need to install 'datasets' as 'pip install datasets --upgrade' and then use the following code:", "### Supported Tasks and Leaderboards\n\n\nThis dataset can contain multiple fields, questions and responses so it can be used for different NLP tasks, depending on the configuration. The dataset structure is described in the Dataset Structure section.\n\n\nThere are no leaderboards associated with this dataset.", "### Languages\n\n\nDataset Structure\n-----------------", "### Data in Argilla\n\n\nThe dataset is created in Argilla with: fields, questions, suggestions, metadata, vectors, and guidelines.\n\n\nThe fields are the dataset records themselves, for the moment just text fields are supported. These are the ones that will be used to provide responses to the questions.\n\n\n\nThe questions are the questions that will be asked to the annotators. They can be of different types, such as rating, text, label\\_selection, multi\\_label\\_selection, or ranking.\n\n\n\nThe suggestions are human or machine generated recommendations for each question to assist the annotator during the annotation process, so those are always linked to the existing questions, and named appending \"-suggestion\" and \"-suggestion-metadata\" to those, containing the value/s of the suggestion and its metadata, respectively. So on, the possible values are the same as in the table above, but the column name is appended with \"-suggestion\" and the metadata is appended with \"-suggestion-metadata\".\n\n\nThe metadata is a dictionary that can be used to provide additional information about the dataset record. This can be useful to provide additional context to the annotators, or to provide additional information about the dataset record itself. For example, you can use this to provide a link to the original source of the dataset record, or to provide additional information about the dataset record itself, such as the author, the date, or the source. The metadata is always optional, and can be potentially linked to the 'metadata\\_properties' defined in the dataset configuration file in 'URL'.\n\n\n\nThe guidelines, are optional as well, and are just a plain string that can be used to provide instructions to the annotators. 
Find those in the annotation guidelines section.", "### Data Instances\n\n\nAn example of a dataset instance in Argilla looks as follows:\n\n\nWhile the same record in HuggingFace 'datasets' looks as follows:", "### Data Fields\n\n\nAmong the dataset fields, we differentiate between the following:\n\n\n* Fields: These are the dataset records themselves, for the moment just text fields are supported. These are the ones that will be used to provide responses to the questions.\n\n\n\t+ text is of type 'text'.\n* Questions: These are the questions that will be asked to the annotators. They can be of different types, such as 'RatingQuestion', 'TextQuestion', 'LabelQuestion', 'MultiLabelQuestion', and 'RankingQuestion'.\n\n\n\t+ validate is of type 'label\\_selection' with the following allowed values ['select', 'unselect'].\n* Suggestions: As of Argilla 1.13.0, the suggestions have been included to provide the annotators with suggestions to ease or assist during the annotation process. Suggestions are linked to the existing questions, are always optional, and contain not just the suggestion itself, but also the metadata linked to it, if applicable.\n\n\n\t+ (optional) validate-suggestion is of type 'label\\_selection' with the following allowed values ['select', 'unselect'].\n\n\nAdditionally, we also have two more fields that are optional and are the following:\n\n\n* metadata: This is an optional field that can be used to provide additional information about the dataset record. This can be useful to provide additional context to the annotators, or to provide additional information about the dataset record itself. For example, you can use this to provide a link to the original source of the dataset record, or to provide additional information about the dataset record itself, such as the author, the date, or the source. The metadata is always optional, and can be potentially linked to the 'metadata\\_properties' defined in the dataset configuration file in 'URL'.\n* external\\_id: This is an optional field that can be used to provide an external ID for the dataset record. This can be useful if you want to link the dataset record to an external resource, such as a database or a file.", "### Data Splits\n\n\nThe dataset contains a single split, which is 'train'.\n\n\nDataset Creation\n----------------", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation guidelines", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information\n\n\nConsiderations for Using the Data\n---------------------------------", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations\n\n\nAdditional Information\n----------------------", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
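Because the HuggingFace export flattens each question's responses into a list column (see the `validate` entry in the record example quoted in this card), tallying annotations is a short loop. A minimal sketch, assuming the single `train` split the card describes:

```python
from datasets import load_dataset

# Count submitted "validate" responses; each row's "validate" column is a
# list of response dicts with "status", "user_id" and "value" keys, as in
# the example record shown in the card.
ds = load_dataset("arkamaldeen/argilla-dataset", split="train")
counts = {}
for record in ds:
    for response in record["validate"]:
        if response["status"] == "submitted":
            counts[response["value"]] = counts.get(response["value"], 0) + 1
print(counts)  # e.g. {"unselect": ..., "select": ...}
```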
fc07cb9a0005c6f48dbb164cffacb43838cd4296
# Dataset Card for Evaluation run of karakuri-ai/karakuri-lm-70b-chat-v0.1 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [karakuri-ai/karakuri-lm-70b-chat-v0.1](https://huggingface.co/karakuri-ai/karakuri-lm-70b-chat-v0.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_karakuri-ai__karakuri-lm-70b-chat-v0.1", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-02-02T05:58:35.536012](https://huggingface.co/datasets/open-llm-leaderboard/details_karakuri-ai__karakuri-lm-70b-chat-v0.1/blob/main/results_2024-02-02T05-58-35.536012.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.5939008681138529, "acc_stderr": 0.0331497474958087, "acc_norm": 0.5979307998153371, "acc_norm_stderr": 0.033826160874214035, "mc1": 0.36474908200734396, "mc1_stderr": 0.016850961061720116, "mc2": 0.513917774256522, "mc2_stderr": 0.014972875043047422 }, "harness|arc:challenge|25": { "acc": 0.5716723549488054, "acc_stderr": 0.014460496367599017, "acc_norm": 0.6151877133105802, "acc_norm_stderr": 0.014218371065251104 }, "harness|hellaswag|10": { "acc": 0.637024497112129, "acc_stderr": 0.0047987512815608376, "acc_norm": 0.8313085042820155, "acc_norm_stderr": 0.003737138752336941 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.4888888888888889, "acc_stderr": 0.04318275491977976, "acc_norm": 0.4888888888888889, "acc_norm_stderr": 0.04318275491977976 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6710526315789473, "acc_stderr": 0.038234289699266046, "acc_norm": 0.6710526315789473, "acc_norm_stderr": 0.038234289699266046 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.6, "acc_stderr": 0.04923659639173309, "acc_norm": 0.6, "acc_norm_stderr": 0.04923659639173309 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6188679245283019, "acc_stderr": 0.029890609686286644, "acc_norm": 0.6188679245283019, "acc_norm_stderr": 0.029890609686286644 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.6597222222222222, "acc_stderr": 0.039621355734862175, "acc_norm": 0.6597222222222222, "acc_norm_stderr": 0.039621355734862175 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.46, "acc_stderr": 0.05009082659620332, 
"acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620332 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.35, "acc_stderr": 0.0479372485441102, "acc_norm": 0.35, "acc_norm_stderr": 0.0479372485441102 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.5780346820809249, "acc_stderr": 0.0376574669386515, "acc_norm": 0.5780346820809249, "acc_norm_stderr": 0.0376574669386515 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.28431372549019607, "acc_stderr": 0.04488482852329017, "acc_norm": 0.28431372549019607, "acc_norm_stderr": 0.04488482852329017 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.64, "acc_stderr": 0.04824181513244218, "acc_norm": 0.64, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5106382978723404, "acc_stderr": 0.03267862331014063, "acc_norm": 0.5106382978723404, "acc_norm_stderr": 0.03267862331014063 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.41228070175438597, "acc_stderr": 0.046306532033665956, "acc_norm": 0.41228070175438597, "acc_norm_stderr": 0.046306532033665956 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5448275862068965, "acc_stderr": 0.04149886942192118, "acc_norm": 0.5448275862068965, "acc_norm_stderr": 0.04149886942192118 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.37037037037037035, "acc_stderr": 0.024870815251057093, "acc_norm": 0.37037037037037035, "acc_norm_stderr": 0.024870815251057093 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.3492063492063492, "acc_stderr": 0.04263906892795132, "acc_norm": 0.3492063492063492, "acc_norm_stderr": 0.04263906892795132 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.41, "acc_stderr": 0.049431107042371025, "acc_norm": 0.41, "acc_norm_stderr": 0.049431107042371025 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.6774193548387096, "acc_stderr": 0.02659308451657227, "acc_norm": 0.6774193548387096, "acc_norm_stderr": 0.02659308451657227 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.3891625615763547, "acc_stderr": 0.034304624161038716, "acc_norm": 0.3891625615763547, "acc_norm_stderr": 0.034304624161038716 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.69, "acc_stderr": 0.04648231987117316, "acc_norm": 0.69, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7696969696969697, "acc_stderr": 0.0328766675860349, "acc_norm": 0.7696969696969697, "acc_norm_stderr": 0.0328766675860349 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7424242424242424, "acc_stderr": 0.031156269519646836, "acc_norm": 0.7424242424242424, "acc_norm_stderr": 0.031156269519646836 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8082901554404145, "acc_stderr": 0.028408953626245265, "acc_norm": 0.8082901554404145, "acc_norm_stderr": 0.028408953626245265 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6128205128205129, "acc_stderr": 0.02469721693087894, "acc_norm": 0.6128205128205129, "acc_norm_stderr": 0.02469721693087894 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3074074074074074, "acc_stderr": 0.028133252578815632, "acc_norm": 0.3074074074074074, "acc_norm_stderr": 0.028133252578815632 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6260504201680672, "acc_stderr": 0.03142946637883708, "acc_norm": 0.6260504201680672, "acc_norm_stderr": 0.03142946637883708 }, "harness|hendrycksTest-high_school_physics|5": 
{ "acc": 0.3973509933774834, "acc_stderr": 0.0399552400768168, "acc_norm": 0.3973509933774834, "acc_norm_stderr": 0.0399552400768168 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7614678899082569, "acc_stderr": 0.01827257581023188, "acc_norm": 0.7614678899082569, "acc_norm_stderr": 0.01827257581023188 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4305555555555556, "acc_stderr": 0.03376922151252336, "acc_norm": 0.4305555555555556, "acc_norm_stderr": 0.03376922151252336 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8480392156862745, "acc_stderr": 0.025195658428931792, "acc_norm": 0.8480392156862745, "acc_norm_stderr": 0.025195658428931792 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8059071729957806, "acc_stderr": 0.025744902532290916, "acc_norm": 0.8059071729957806, "acc_norm_stderr": 0.025744902532290916 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6860986547085202, "acc_stderr": 0.03114679648297246, "acc_norm": 0.6860986547085202, "acc_norm_stderr": 0.03114679648297246 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.6717557251908397, "acc_stderr": 0.041184385658062976, "acc_norm": 0.6717557251908397, "acc_norm_stderr": 0.041184385658062976 }, "harness|hendrycksTest-international_law|5": { "acc": 0.743801652892562, "acc_stderr": 0.03984979653302871, "acc_norm": 0.743801652892562, "acc_norm_stderr": 0.03984979653302871 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.6944444444444444, "acc_stderr": 0.04453197507374983, "acc_norm": 0.6944444444444444, "acc_norm_stderr": 0.04453197507374983 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.6932515337423313, "acc_stderr": 0.03623089915724147, "acc_norm": 0.6932515337423313, "acc_norm_stderr": 0.03623089915724147 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.41964285714285715, "acc_stderr": 0.04684099321077106, "acc_norm": 0.41964285714285715, "acc_norm_stderr": 0.04684099321077106 }, "harness|hendrycksTest-management|5": { "acc": 0.7475728155339806, "acc_stderr": 0.04301250399690878, "acc_norm": 0.7475728155339806, "acc_norm_stderr": 0.04301250399690878 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8675213675213675, "acc_stderr": 0.022209309073165606, "acc_norm": 0.8675213675213675, "acc_norm_stderr": 0.022209309073165606 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.54, "acc_stderr": 0.05009082659620332, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620332 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7841634738186463, "acc_stderr": 0.014711684386139946, "acc_norm": 0.7841634738186463, "acc_norm_stderr": 0.014711684386139946 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6763005780346821, "acc_stderr": 0.02519018132760842, "acc_norm": 0.6763005780346821, "acc_norm_stderr": 0.02519018132760842 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4659217877094972, "acc_stderr": 0.016683615837486867, "acc_norm": 0.4659217877094972, "acc_norm_stderr": 0.016683615837486867 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6339869281045751, "acc_stderr": 0.02758281141515962, "acc_norm": 0.6339869281045751, "acc_norm_stderr": 0.02758281141515962 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6784565916398714, "acc_stderr": 0.026527724079528872, "acc_norm": 0.6784565916398714, "acc_norm_stderr": 0.026527724079528872 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7376543209876543, "acc_stderr": 0.024477222856135118, "acc_norm": 0.7376543209876543, "acc_norm_stderr": 
0.024477222856135118 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.4787234042553192, "acc_stderr": 0.029800481645628693, "acc_norm": 0.4787234042553192, "acc_norm_stderr": 0.029800481645628693 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4517601043024772, "acc_stderr": 0.012710662233660247, "acc_norm": 0.4517601043024772, "acc_norm_stderr": 0.012710662233660247 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5625, "acc_stderr": 0.030134614954403924, "acc_norm": 0.5625, "acc_norm_stderr": 0.030134614954403924 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6062091503267973, "acc_stderr": 0.019766211991073066, "acc_norm": 0.6062091503267973, "acc_norm_stderr": 0.019766211991073066 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.5909090909090909, "acc_stderr": 0.04709306978661895, "acc_norm": 0.5909090909090909, "acc_norm_stderr": 0.04709306978661895 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.6612244897959184, "acc_stderr": 0.030299506562154188, "acc_norm": 0.6612244897959184, "acc_norm_stderr": 0.030299506562154188 }, "harness|hendrycksTest-sociology|5": { "acc": 0.7910447761194029, "acc_stderr": 0.028748298931728655, "acc_norm": 0.7910447761194029, "acc_norm_stderr": 0.028748298931728655 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.9, "acc_stderr": 0.030151134457776334, "acc_norm": 0.9, "acc_norm_stderr": 0.030151134457776334 }, "harness|hendrycksTest-virology|5": { "acc": 0.4759036144578313, "acc_stderr": 0.03887971849597264, "acc_norm": 0.4759036144578313, "acc_norm_stderr": 0.03887971849597264 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7777777777777778, "acc_stderr": 0.03188578017686398, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.03188578017686398 }, "harness|truthfulqa:mc|0": { "mc1": 0.36474908200734396, "mc1_stderr": 0.016850961061720116, "mc2": 0.513917774256522, "mc2_stderr": 0.014972875043047422 }, "harness|winogrande|5": { "acc": 0.7837411207576953, "acc_stderr": 0.01157061486140935 }, "harness|gsm8k|5": { "acc": 0.4040940106141016, "acc_stderr": 0.013516752972721717 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
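The per-task blobs in "Latest results" are plain JSON, so they can be summarized with the standard library. A minimal sketch; the two-entry `results_json` literal is a hypothetical excerpt (values copied from the blob above) standing in for the full results file:

```python
import json

# Hypothetical two-task excerpt of the "Latest results" JSON shown above.
results_json = """{
  "harness|hendrycksTest-high_school_psychology|5": {"acc": 0.7614678899082569},
  "harness|hendrycksTest-high_school_statistics|5": {"acc": 0.4305555555555556}
}"""

results = json.loads(results_json)

# Average the per-subject accuracies of the MMLU ("hendrycksTest") tasks.
mmlu = {task: scores["acc"]
        for task, scores in results.items()
        if task.startswith("harness|hendrycksTest")}
print(f"{len(mmlu)} subjects, mean acc = {sum(mmlu.values()) / len(mmlu):.4f}")
```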
open-llm-leaderboard/details_karakuri-ai__karakuri-lm-70b-chat-v0.1
[ "region:us" ]
2024-02-02T06:01:03+00:00
{"pretty_name": "Evaluation run of karakuri-ai/karakuri-lm-70b-chat-v0.1", "dataset_summary": "Dataset automatically created during the evaluation run of model [karakuri-ai/karakuri-lm-70b-chat-v0.1](https://huggingface.co/karakuri-ai/karakuri-lm-70b-chat-v0.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_karakuri-ai__karakuri-lm-70b-chat-v0.1\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-02T05:58:35.536012](https://huggingface.co/datasets/open-llm-leaderboard/details_karakuri-ai__karakuri-lm-70b-chat-v0.1/blob/main/results_2024-02-02T05-58-35.536012.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5939008681138529,\n \"acc_stderr\": 0.0331497474958087,\n \"acc_norm\": 0.5979307998153371,\n \"acc_norm_stderr\": 0.033826160874214035,\n \"mc1\": 0.36474908200734396,\n \"mc1_stderr\": 0.016850961061720116,\n \"mc2\": 0.513917774256522,\n \"mc2_stderr\": 0.014972875043047422\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5716723549488054,\n \"acc_stderr\": 0.014460496367599017,\n \"acc_norm\": 0.6151877133105802,\n \"acc_norm_stderr\": 0.014218371065251104\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.637024497112129,\n \"acc_stderr\": 0.0047987512815608376,\n \"acc_norm\": 0.8313085042820155,\n \"acc_norm_stderr\": 0.003737138752336941\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.4888888888888889,\n \"acc_stderr\": 0.04318275491977976,\n \"acc_norm\": 0.4888888888888889,\n \"acc_norm_stderr\": 0.04318275491977976\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6710526315789473,\n \"acc_stderr\": 0.038234289699266046,\n \"acc_norm\": 0.6710526315789473,\n \"acc_norm_stderr\": 0.038234289699266046\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.6,\n \"acc_stderr\": 0.04923659639173309,\n \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.04923659639173309\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6188679245283019,\n \"acc_stderr\": 0.029890609686286644,\n \"acc_norm\": 0.6188679245283019,\n \"acc_norm_stderr\": 0.029890609686286644\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.6597222222222222,\n \"acc_stderr\": 0.039621355734862175,\n \"acc_norm\": 0.6597222222222222,\n \"acc_norm_stderr\": 0.039621355734862175\n },\n 
\"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.39,\n \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\": 0.39,\n \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.46,\n \"acc_stderr\": 0.05009082659620332,\n \"acc_norm\": 0.46,\n \"acc_norm_stderr\": 0.05009082659620332\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.0479372485441102,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5780346820809249,\n \"acc_stderr\": 0.0376574669386515,\n \"acc_norm\": 0.5780346820809249,\n \"acc_norm_stderr\": 0.0376574669386515\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.28431372549019607,\n \"acc_stderr\": 0.04488482852329017,\n \"acc_norm\": 0.28431372549019607,\n \"acc_norm_stderr\": 0.04488482852329017\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.64,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.64,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5106382978723404,\n \"acc_stderr\": 0.03267862331014063,\n \"acc_norm\": 0.5106382978723404,\n \"acc_norm_stderr\": 0.03267862331014063\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.41228070175438597,\n \"acc_stderr\": 0.046306532033665956,\n \"acc_norm\": 0.41228070175438597,\n \"acc_norm_stderr\": 0.046306532033665956\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5448275862068965,\n \"acc_stderr\": 0.04149886942192118,\n \"acc_norm\": 0.5448275862068965,\n \"acc_norm_stderr\": 0.04149886942192118\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.37037037037037035,\n \"acc_stderr\": 0.024870815251057093,\n \"acc_norm\": 0.37037037037037035,\n \"acc_norm_stderr\": 0.024870815251057093\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.3492063492063492,\n \"acc_stderr\": 0.04263906892795132,\n \"acc_norm\": 0.3492063492063492,\n \"acc_norm_stderr\": 0.04263906892795132\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.41,\n \"acc_stderr\": 0.049431107042371025,\n \"acc_norm\": 0.41,\n \"acc_norm_stderr\": 0.049431107042371025\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.6774193548387096,\n \"acc_stderr\": 0.02659308451657227,\n \"acc_norm\": 0.6774193548387096,\n \"acc_norm_stderr\": 0.02659308451657227\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.3891625615763547,\n \"acc_stderr\": 0.034304624161038716,\n \"acc_norm\": 0.3891625615763547,\n \"acc_norm_stderr\": 0.034304624161038716\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7696969696969697,\n \"acc_stderr\": 0.0328766675860349,\n \"acc_norm\": 0.7696969696969697,\n \"acc_norm_stderr\": 0.0328766675860349\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7424242424242424,\n \"acc_stderr\": 0.031156269519646836,\n \"acc_norm\": 0.7424242424242424,\n \"acc_norm_stderr\": 0.031156269519646836\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8082901554404145,\n \"acc_stderr\": 0.028408953626245265,\n \"acc_norm\": 0.8082901554404145,\n 
\"acc_norm_stderr\": 0.028408953626245265\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6128205128205129,\n \"acc_stderr\": 0.02469721693087894,\n \"acc_norm\": 0.6128205128205129,\n \"acc_norm_stderr\": 0.02469721693087894\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3074074074074074,\n \"acc_stderr\": 0.028133252578815632,\n \"acc_norm\": 0.3074074074074074,\n \"acc_norm_stderr\": 0.028133252578815632\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6260504201680672,\n \"acc_stderr\": 0.03142946637883708,\n \"acc_norm\": 0.6260504201680672,\n \"acc_norm_stderr\": 0.03142946637883708\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3973509933774834,\n \"acc_stderr\": 0.0399552400768168,\n \"acc_norm\": 0.3973509933774834,\n \"acc_norm_stderr\": 0.0399552400768168\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.7614678899082569,\n \"acc_stderr\": 0.01827257581023188,\n \"acc_norm\": 0.7614678899082569,\n \"acc_norm_stderr\": 0.01827257581023188\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.4305555555555556,\n \"acc_stderr\": 0.03376922151252336,\n \"acc_norm\": 0.4305555555555556,\n \"acc_norm_stderr\": 0.03376922151252336\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8480392156862745,\n \"acc_stderr\": 0.025195658428931792,\n \"acc_norm\": 0.8480392156862745,\n \"acc_norm_stderr\": 0.025195658428931792\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8059071729957806,\n \"acc_stderr\": 0.025744902532290916,\n \"acc_norm\": 0.8059071729957806,\n \"acc_norm_stderr\": 0.025744902532290916\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6860986547085202,\n \"acc_stderr\": 0.03114679648297246,\n \"acc_norm\": 0.6860986547085202,\n \"acc_norm_stderr\": 0.03114679648297246\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.6717557251908397,\n \"acc_stderr\": 0.041184385658062976,\n \"acc_norm\": 0.6717557251908397,\n \"acc_norm_stderr\": 0.041184385658062976\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.743801652892562,\n \"acc_stderr\": 0.03984979653302871,\n \"acc_norm\": 0.743801652892562,\n \"acc_norm_stderr\": 0.03984979653302871\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.6944444444444444,\n \"acc_stderr\": 0.04453197507374983,\n \"acc_norm\": 0.6944444444444444,\n \"acc_norm_stderr\": 0.04453197507374983\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.6932515337423313,\n \"acc_stderr\": 0.03623089915724147,\n \"acc_norm\": 0.6932515337423313,\n \"acc_norm_stderr\": 0.03623089915724147\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.41964285714285715,\n \"acc_stderr\": 0.04684099321077106,\n \"acc_norm\": 0.41964285714285715,\n \"acc_norm_stderr\": 0.04684099321077106\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7475728155339806,\n \"acc_stderr\": 0.04301250399690878,\n \"acc_norm\": 0.7475728155339806,\n \"acc_norm_stderr\": 0.04301250399690878\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8675213675213675,\n \"acc_stderr\": 0.022209309073165606,\n \"acc_norm\": 0.8675213675213675,\n \"acc_norm_stderr\": 0.022209309073165606\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.54,\n \"acc_stderr\": 0.05009082659620332,\n \"acc_norm\": 0.54,\n \"acc_norm_stderr\": 0.05009082659620332\n },\n 
\"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7841634738186463,\n \"acc_stderr\": 0.014711684386139946,\n \"acc_norm\": 0.7841634738186463,\n \"acc_norm_stderr\": 0.014711684386139946\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.6763005780346821,\n \"acc_stderr\": 0.02519018132760842,\n \"acc_norm\": 0.6763005780346821,\n \"acc_norm_stderr\": 0.02519018132760842\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4659217877094972,\n \"acc_stderr\": 0.016683615837486867,\n \"acc_norm\": 0.4659217877094972,\n \"acc_norm_stderr\": 0.016683615837486867\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.6339869281045751,\n \"acc_stderr\": 0.02758281141515962,\n \"acc_norm\": 0.6339869281045751,\n \"acc_norm_stderr\": 0.02758281141515962\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6784565916398714,\n \"acc_stderr\": 0.026527724079528872,\n \"acc_norm\": 0.6784565916398714,\n \"acc_norm_stderr\": 0.026527724079528872\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7376543209876543,\n \"acc_stderr\": 0.024477222856135118,\n \"acc_norm\": 0.7376543209876543,\n \"acc_norm_stderr\": 0.024477222856135118\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4787234042553192,\n \"acc_stderr\": 0.029800481645628693,\n \"acc_norm\": 0.4787234042553192,\n \"acc_norm_stderr\": 0.029800481645628693\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4517601043024772,\n \"acc_stderr\": 0.012710662233660247,\n \"acc_norm\": 0.4517601043024772,\n \"acc_norm_stderr\": 0.012710662233660247\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.5625,\n \"acc_stderr\": 0.030134614954403924,\n \"acc_norm\": 0.5625,\n \"acc_norm_stderr\": 0.030134614954403924\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6062091503267973,\n \"acc_stderr\": 0.019766211991073066,\n \"acc_norm\": 0.6062091503267973,\n \"acc_norm_stderr\": 0.019766211991073066\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.5909090909090909,\n \"acc_stderr\": 0.04709306978661895,\n \"acc_norm\": 0.5909090909090909,\n \"acc_norm_stderr\": 0.04709306978661895\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.6612244897959184,\n \"acc_stderr\": 0.030299506562154188,\n \"acc_norm\": 0.6612244897959184,\n \"acc_norm_stderr\": 0.030299506562154188\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.7910447761194029,\n \"acc_stderr\": 0.028748298931728655,\n \"acc_norm\": 0.7910447761194029,\n \"acc_norm_stderr\": 0.028748298931728655\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.9,\n \"acc_stderr\": 0.030151134457776334,\n \"acc_norm\": 0.9,\n \"acc_norm_stderr\": 0.030151134457776334\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.4759036144578313,\n \"acc_stderr\": 0.03887971849597264,\n \"acc_norm\": 0.4759036144578313,\n \"acc_norm_stderr\": 0.03887971849597264\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.03188578017686398,\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.03188578017686398\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.36474908200734396,\n \"mc1_stderr\": 0.016850961061720116,\n \"mc2\": 0.513917774256522,\n \"mc2_stderr\": 0.014972875043047422\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7837411207576953,\n \"acc_stderr\": 0.01157061486140935\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.4040940106141016,\n 
\"acc_stderr\": 0.013516752972721717\n }\n}\n```", "repo_url": "https://huggingface.co/karakuri-ai/karakuri-lm-70b-chat-v0.1", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_02T05_58_35.536012", "path": ["**/details_harness|arc:challenge|25_2024-02-02T05-58-35.536012.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-02T05-58-35.536012.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_02T05_58_35.536012", "path": ["**/details_harness|gsm8k|5_2024-02-02T05-58-35.536012.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-02T05-58-35.536012.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_02T05_58_35.536012", "path": ["**/details_harness|hellaswag|10_2024-02-02T05-58-35.536012.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-02T05-58-35.536012.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_02T05_58_35.536012", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T05-58-35.536012.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T05-58-35.536012.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T05-58-35.536012.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T05-58-35.536012.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T05-58-35.536012.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T05-58-35.536012.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_02T05_58_35.536012", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T05-58-35.536012.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T05-58-35.536012.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_02T05_58_35.536012", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T05-58-35.536012.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T05-58-35.536012.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_02T05_58_35.536012", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T05-58-35.536012.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T05-58-35.536012.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_02T05_58_35.536012", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T05-58-35.536012.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T05-58-35.536012.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_02T05_58_35.536012", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T05-58-35.536012.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T05-58-35.536012.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_02T05_58_35.536012", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T05-58-35.536012.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T05-58-35.536012.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_02T05_58_35.536012", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T05-58-35.536012.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T05-58-35.536012.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_02_02T05_58_35.536012", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T05-58-35.536012.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T05-58-35.536012.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_02T05_58_35.536012", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T05-58-35.536012.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T05-58-35.536012.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_02T05_58_35.536012", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T05-58-35.536012.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T05-58-35.536012.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_02T05_58_35.536012", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T05-58-35.536012.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T05-58-35.536012.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_02T05_58_35.536012", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T05-58-35.536012.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T05-58-35.536012.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_02T05_58_35.536012", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T05-58-35.536012.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T05-58-35.536012.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_02T05_58_35.536012", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T05-58-35.536012.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T05-58-35.536012.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_02T05_58_35.536012", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T05-58-35.536012.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T05-58-35.536012.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_02T05_58_35.536012", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T05-58-35.536012.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T05-58-35.536012.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_02T05_58_35.536012", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T05-58-35.536012.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T05-58-35.536012.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_02T05_58_35.536012", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T05-58-35.536012.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T05-58-35.536012.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_02T05_58_35.536012", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T05-58-35.536012.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T05-58-35.536012.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_02T05_58_35.536012", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T05-58-35.536012.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T05-58-35.536012.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_02T05_58_35.536012", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T05-58-35.536012.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T05-58-35.536012.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_02T05_58_35.536012", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T05-58-35.536012.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T05-58-35.536012.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_02T05_58_35.536012", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T05-58-35.536012.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T05-58-35.536012.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_02T05_58_35.536012", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T05-58-35.536012.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T05-58-35.536012.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_02T05_58_35.536012", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T05-58-35.536012.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T05-58-35.536012.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_02T05_58_35.536012", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T05-58-35.536012.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T05-58-35.536012.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_02T05_58_35.536012", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T05-58-35.536012.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T05-58-35.536012.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_02T05_58_35.536012", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T05-58-35.536012.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T05-58-35.536012.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_02T05_58_35.536012", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T05-58-35.536012.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T05-58-35.536012.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_02T05_58_35.536012", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T05-58-35.536012.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T05-58-35.536012.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_02T05_58_35.536012", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T05-58-35.536012.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T05-58-35.536012.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_02T05_58_35.536012", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T05-58-35.536012.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T05-58-35.536012.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_02T05_58_35.536012", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T05-58-35.536012.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T05-58-35.536012.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_02T05_58_35.536012", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T05-58-35.536012.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T05-58-35.536012.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_02T05_58_35.536012", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T05-58-35.536012.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T05-58-35.536012.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_02T05_58_35.536012", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T05-58-35.536012.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T05-58-35.536012.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_02T05_58_35.536012", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T05-58-35.536012.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T05-58-35.536012.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_02_02T05_58_35.536012", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T05-58-35.536012.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T05-58-35.536012.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_02T05_58_35.536012", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T05-58-35.536012.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T05-58-35.536012.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_02T05_58_35.536012", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T05-58-35.536012.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T05-58-35.536012.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_02T05_58_35.536012", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T05-58-35.536012.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T05-58-35.536012.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_02T05_58_35.536012", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T05-58-35.536012.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T05-58-35.536012.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_02T05_58_35.536012", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T05-58-35.536012.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T05-58-35.536012.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_02T05_58_35.536012", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T05-58-35.536012.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T05-58-35.536012.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_02T05_58_35.536012", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T05-58-35.536012.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T05-58-35.536012.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_02T05_58_35.536012", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T05-58-35.536012.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T05-58-35.536012.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_02T05_58_35.536012", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T05-58-35.536012.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T05-58-35.536012.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_02T05_58_35.536012", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T05-58-35.536012.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T05-58-35.536012.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_02T05_58_35.536012", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T05-58-35.536012.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T05-58-35.536012.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_02T05_58_35.536012", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T05-58-35.536012.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T05-58-35.536012.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_02T05_58_35.536012", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T05-58-35.536012.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T05-58-35.536012.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_02T05_58_35.536012", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T05-58-35.536012.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T05-58-35.536012.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_02T05_58_35.536012", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T05-58-35.536012.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T05-58-35.536012.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_02T05_58_35.536012", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T05-58-35.536012.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T05-58-35.536012.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_02T05_58_35.536012", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T05-58-35.536012.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T05-58-35.536012.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_02T05_58_35.536012", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T05-58-35.536012.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T05-58-35.536012.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_02T05_58_35.536012", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T05-58-35.536012.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T05-58-35.536012.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_02T05_58_35.536012", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T05-58-35.536012.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T05-58-35.536012.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_02T05_58_35.536012", "path": ["**/details_harness|winogrande|5_2024-02-02T05-58-35.536012.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-02-02T05-58-35.536012.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_02_02T05_58_35.536012", "path": ["results_2024-02-02T05-58-35.536012.parquet"]}, {"split": "latest", "path": ["results_2024-02-02T05-58-35.536012.parquet"]}]}]}
2024-02-02T06:01:26+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of karakuri-ai/karakuri-lm-70b-chat-v0.1 Dataset automatically created during the evaluation run of model karakuri-ai/karakuri-lm-70b-chat-v0.1 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following (see the snippet reproduced below): ## Latest results These are the latest results from run 2024-02-02T05:58:35.536012 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
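The loading snippet referenced above ("do the following") was dropped when the card text was flattened into this record; it is reproduced here from the `dataset_summary` field of the metadata above:

```python
from datasets import load_dataset

data = load_dataset("open-llm-leaderboard/details_karakuri-ai__karakuri-lm-70b-chat-v0.1",
                    "harness_winogrande_5",
                    split="train")
```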
[ "# Dataset Card for Evaluation run of karakuri-ai/karakuri-lm-70b-chat-v0.1\n\n\n\nDataset automatically created during the evaluation run of model karakuri-ai/karakuri-lm-70b-chat-v0.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T05:58:35.536012(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of karakuri-ai/karakuri-lm-70b-chat-v0.1\n\n\n\nDataset automatically created during the evaluation run of model karakuri-ai/karakuri-lm-70b-chat-v0.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T05:58:35.536012(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
17dd9d8700f8f0c26942b82c60b2b6f518cc49d2
# Dataset Card for Evaluation run of ChuckMcSneed/SMaxxxer-v1-70b <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [ChuckMcSneed/SMaxxxer-v1-70b](https://huggingface.co/ChuckMcSneed/SMaxxxer-v1-70b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_ChuckMcSneed__SMaxxxer-v1-70b", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-02-02T06:12:15.180411](https://huggingface.co/datasets/open-llm-leaderboard/details_ChuckMcSneed__SMaxxxer-v1-70b/blob/main/results_2024-02-02T06-12-15.180411.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.7046050402424828, "acc_stderr": 0.03004645057227282, "acc_norm": 0.7084589909997574, "acc_norm_stderr": 0.03062387199604588, "mc1": 0.423500611995104, "mc1_stderr": 0.017297421448534727, "mc2": 0.6069766843815353, "mc2_stderr": 0.01472043739247153 }, "harness|arc:challenge|25": { "acc": 0.659556313993174, "acc_stderr": 0.013847460518892976, "acc_norm": 0.7064846416382252, "acc_norm_stderr": 0.013307250444941113 }, "harness|hellaswag|10": { "acc": 0.6744672376020713, "acc_stderr": 0.004676159299105418, "acc_norm": 0.8802031467835093, "acc_norm_stderr": 0.003240601883180498 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.34, "acc_stderr": 0.04760952285695236, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695236 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6518518518518519, "acc_stderr": 0.041153246103369526, "acc_norm": 0.6518518518518519, "acc_norm_stderr": 0.041153246103369526 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.8289473684210527, "acc_stderr": 0.030643607071677084, "acc_norm": 0.8289473684210527, "acc_norm_stderr": 0.030643607071677084 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.76, "acc_stderr": 0.04292346959909283, "acc_norm": 0.76, "acc_norm_stderr": 0.04292346959909283 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7358490566037735, "acc_stderr": 0.027134291628741702, "acc_norm": 0.7358490566037735, "acc_norm_stderr": 0.027134291628741702 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.8333333333333334, "acc_stderr": 0.031164899666948617, "acc_norm": 0.8333333333333334, "acc_norm_stderr": 0.031164899666948617 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.45, "acc_stderr": 0.04999999999999999, "acc_norm": 0.45, "acc_norm_stderr": 0.04999999999999999 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.59, "acc_stderr": 0.04943110704237102, "acc_norm": 0.59, "acc_norm_stderr":
## Latest results

These are the [latest results from run 2024-02-02T06:12:15.180411](https://huggingface.co/datasets/open-llm-leaderboard/details_ChuckMcSneed__SMaxxxer-v1-70b/blob/main/results_2024-02-02T06-12-15.180411.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "acc": 0.7046050402424828,
        "acc_stderr": 0.03004645057227282,
        "acc_norm": 0.7084589909997574,
        "acc_norm_stderr": 0.03062387199604588,
        "mc1": 0.423500611995104,
        "mc1_stderr": 0.017297421448534727,
        "mc2": 0.6069766843815353,
        "mc2_stderr": 0.01472043739247153
    },
    "harness|arc:challenge|25": {
        "acc": 0.659556313993174,
        "acc_stderr": 0.013847460518892976,
        "acc_norm": 0.7064846416382252,
        "acc_norm_stderr": 0.013307250444941113
    },
    "harness|hellaswag|10": {
        "acc": 0.6744672376020713,
        "acc_stderr": 0.004676159299105418,
        "acc_norm": 0.8802031467835093,
        "acc_norm_stderr": 0.003240601883180498
    },
    "harness|hendrycksTest-abstract_algebra|5": {
        "acc": 0.34,
        "acc_stderr": 0.04760952285695236,
        "acc_norm": 0.34,
        "acc_norm_stderr": 0.04760952285695236
    },
    "harness|hendrycksTest-anatomy|5": {
        "acc": 0.6518518518518519,
        "acc_stderr": 0.041153246103369526,
        "acc_norm": 0.6518518518518519,
        "acc_norm_stderr": 0.041153246103369526
    },
    "harness|hendrycksTest-astronomy|5": {
        "acc": 0.8289473684210527,
        "acc_stderr": 0.030643607071677084,
        "acc_norm": 0.8289473684210527,
        "acc_norm_stderr": 0.030643607071677084
    },
    "harness|hendrycksTest-business_ethics|5": {
        "acc": 0.76,
        "acc_stderr": 0.04292346959909283,
        "acc_norm": 0.76,
        "acc_norm_stderr": 0.04292346959909283
    },
    "harness|hendrycksTest-clinical_knowledge|5": {
        "acc": 0.7358490566037735,
        "acc_stderr": 0.027134291628741702,
        "acc_norm": 0.7358490566037735,
        "acc_norm_stderr": 0.027134291628741702
    },
    "harness|hendrycksTest-college_biology|5": {
        "acc": 0.8333333333333334,
        "acc_stderr": 0.031164899666948617,
        "acc_norm": 0.8333333333333334,
        "acc_norm_stderr": 0.031164899666948617
    },
    "harness|hendrycksTest-college_chemistry|5": {
        "acc": 0.45,
        "acc_stderr": 0.04999999999999999,
        "acc_norm": 0.45,
        "acc_norm_stderr": 0.04999999999999999
    },
    "harness|hendrycksTest-college_computer_science|5": {
        "acc": 0.59,
        "acc_stderr": 0.04943110704237102,
        "acc_norm": 0.59,
        "acc_norm_stderr": 0.04943110704237102
    },
    "harness|hendrycksTest-college_mathematics|5": {
        "acc": 0.39,
        "acc_stderr": 0.04902071300001975,
        "acc_norm": 0.39,
        "acc_norm_stderr": 0.04902071300001975
    },
    "harness|hendrycksTest-college_medicine|5": {
        "acc": 0.6763005780346821,
        "acc_stderr": 0.0356760379963917,
        "acc_norm": 0.6763005780346821,
        "acc_norm_stderr": 0.0356760379963917
    },
    "harness|hendrycksTest-college_physics|5": {
        "acc": 0.4019607843137255,
        "acc_stderr": 0.04878608714466996,
        "acc_norm": 0.4019607843137255,
        "acc_norm_stderr": 0.04878608714466996
    },
    "harness|hendrycksTest-computer_security|5": {
        "acc": 0.78,
        "acc_stderr": 0.04163331998932262,
        "acc_norm": 0.78,
        "acc_norm_stderr": 0.04163331998932262
    },
    "harness|hendrycksTest-conceptual_physics|5": {
        "acc": 0.6893617021276596,
        "acc_stderr": 0.03025123757921317,
        "acc_norm": 0.6893617021276596,
        "acc_norm_stderr": 0.03025123757921317
    },
    "harness|hendrycksTest-econometrics|5": {
        "acc": 0.4298245614035088,
        "acc_stderr": 0.046570472605949625,
        "acc_norm": 0.4298245614035088,
        "acc_norm_stderr": 0.046570472605949625
    },
    "harness|hendrycksTest-electrical_engineering|5": {
        "acc": 0.6137931034482759,
        "acc_stderr": 0.04057324734419036,
        "acc_norm": 0.6137931034482759,
        "acc_norm_stderr": 0.04057324734419036
    },
    "harness|hendrycksTest-elementary_mathematics|5": {
        "acc": 0.47354497354497355,
        "acc_stderr": 0.02571523981134676,
        "acc_norm": 0.47354497354497355,
        "acc_norm_stderr": 0.02571523981134676
    },
    "harness|hendrycksTest-formal_logic|5": {
        "acc": 0.48412698412698413,
        "acc_stderr": 0.04469881854072606,
        "acc_norm": 0.48412698412698413,
        "acc_norm_stderr": 0.04469881854072606
    },
    "harness|hendrycksTest-global_facts|5": {
        "acc": 0.52,
        "acc_stderr": 0.050211673156867795,
        "acc_norm": 0.52,
        "acc_norm_stderr": 0.050211673156867795
    },
    "harness|hendrycksTest-high_school_biology|5": {
        "acc": 0.8193548387096774,
        "acc_stderr": 0.021886178567172534,
        "acc_norm": 0.8193548387096774,
        "acc_norm_stderr": 0.021886178567172534
    },
    "harness|hendrycksTest-high_school_chemistry|5": {
        "acc": 0.5714285714285714,
        "acc_stderr": 0.034819048444388045,
        "acc_norm": 0.5714285714285714,
        "acc_norm_stderr": 0.034819048444388045
    },
    "harness|hendrycksTest-high_school_computer_science|5": {
        "acc": 0.8,
        "acc_stderr": 0.04020151261036846,
        "acc_norm": 0.8,
        "acc_norm_stderr": 0.04020151261036846
    },
    "harness|hendrycksTest-high_school_european_history|5": {
        "acc": 0.8363636363636363,
        "acc_stderr": 0.02888787239548795,
        "acc_norm": 0.8363636363636363,
        "acc_norm_stderr": 0.02888787239548795
    },
    "harness|hendrycksTest-high_school_geography|5": {
        "acc": 0.8888888888888888,
        "acc_stderr": 0.02239078763821677,
        "acc_norm": 0.8888888888888888,
        "acc_norm_stderr": 0.02239078763821677
    },
    "harness|hendrycksTest-high_school_government_and_politics|5": {
        "acc": 0.9378238341968912,
        "acc_stderr": 0.017426974154240524,
        "acc_norm": 0.9378238341968912,
        "acc_norm_stderr": 0.017426974154240524
    },
    "harness|hendrycksTest-high_school_macroeconomics|5": {
        "acc": 0.7205128205128205,
        "acc_stderr": 0.022752388839776823,
        "acc_norm": 0.7205128205128205,
        "acc_norm_stderr": 0.022752388839776823
    },
    "harness|hendrycksTest-high_school_mathematics|5": {
        "acc": 0.3111111111111111,
        "acc_stderr": 0.028226446749683522,
        "acc_norm": 0.3111111111111111,
        "acc_norm_stderr": 0.028226446749683522
    },
    "harness|hendrycksTest-high_school_microeconomics|5": {
        "acc": 0.7689075630252101,
        "acc_stderr": 0.027381406927868886,
        "acc_norm": 0.7689075630252101,
        "acc_norm_stderr": 0.027381406927868886
    },
    "harness|hendrycksTest-high_school_physics|5": {
        "acc": 0.4900662251655629,
        "acc_stderr": 0.04081677107248436,
        "acc_norm": 0.4900662251655629,
        "acc_norm_stderr": 0.04081677107248436
    },
    "harness|hendrycksTest-high_school_psychology|5": {
        "acc": 0.8935779816513761,
        "acc_stderr": 0.013221554674594372,
        "acc_norm": 0.8935779816513761,
        "acc_norm_stderr": 0.013221554674594372
    },
    "harness|hendrycksTest-high_school_statistics|5": {
        "acc": 0.5833333333333334,
        "acc_stderr": 0.03362277436608043,
        "acc_norm": 0.5833333333333334,
        "acc_norm_stderr": 0.03362277436608043
    },
    "harness|hendrycksTest-high_school_us_history|5": {
        "acc": 0.9264705882352942,
        "acc_stderr": 0.018318855850089678,
        "acc_norm": 0.9264705882352942,
        "acc_norm_stderr": 0.018318855850089678
    },
    "harness|hendrycksTest-high_school_world_history|5": {
        "acc": 0.8987341772151899,
        "acc_stderr": 0.019637720526065494,
        "acc_norm": 0.8987341772151899,
        "acc_norm_stderr": 0.019637720526065494
    },
    "harness|hendrycksTest-human_aging|5": {
        "acc": 0.8026905829596412,
        "acc_stderr": 0.02670985334496796,
        "acc_norm": 0.8026905829596412,
        "acc_norm_stderr": 0.02670985334496796
    },
    "harness|hendrycksTest-human_sexuality|5": {
        "acc": 0.8854961832061069,
        "acc_stderr": 0.027927473753597446,
        "acc_norm": 0.8854961832061069,
        "acc_norm_stderr": 0.027927473753597446
    },
    "harness|hendrycksTest-international_law|5": {
        "acc": 0.859504132231405,
        "acc_stderr": 0.03172233426002158,
        "acc_norm": 0.859504132231405,
        "acc_norm_stderr": 0.03172233426002158
    },
    "harness|hendrycksTest-jurisprudence|5": {
        "acc": 0.8240740740740741,
        "acc_stderr": 0.036809181416738807,
        "acc_norm": 0.8240740740740741,
        "acc_norm_stderr": 0.036809181416738807
    },
    "harness|hendrycksTest-logical_fallacies|5": {
        "acc": 0.8466257668711656,
        "acc_stderr": 0.028311601441438603,
        "acc_norm": 0.8466257668711656,
        "acc_norm_stderr": 0.028311601441438603
    },
    "harness|hendrycksTest-machine_learning|5": {
        "acc": 0.48214285714285715,
        "acc_stderr": 0.047427623612430116,
        "acc_norm": 0.48214285714285715,
        "acc_norm_stderr": 0.047427623612430116
    },
    "harness|hendrycksTest-management|5": {
        "acc": 0.8252427184466019,
        "acc_stderr": 0.037601780060266196,
        "acc_norm": 0.8252427184466019,
        "acc_norm_stderr": 0.037601780060266196
    },
    "harness|hendrycksTest-marketing|5": {
        "acc": 0.8846153846153846,
        "acc_stderr": 0.02093019318517933,
        "acc_norm": 0.8846153846153846,
        "acc_norm_stderr": 0.02093019318517933
    },
    "harness|hendrycksTest-medical_genetics|5": {
        "acc": 0.69,
        "acc_stderr": 0.04648231987117316,
        "acc_norm": 0.69,
        "acc_norm_stderr": 0.04648231987117316
    },
    "harness|hendrycksTest-miscellaneous|5": {
        "acc": 0.8671775223499362,
        "acc_stderr": 0.012136303209884566,
        "acc_norm": 0.8671775223499362,
        "acc_norm_stderr": 0.012136303209884566
    },
    "harness|hendrycksTest-moral_disputes|5": {
        "acc": 0.8063583815028902,
        "acc_stderr": 0.021274230317515568,
        "acc_norm": 0.8063583815028902,
        "acc_norm_stderr": 0.021274230317515568
    },
    "harness|hendrycksTest-moral_scenarios|5": {
        "acc": 0.5743016759776536,
        "acc_stderr": 0.01653682964899712,
        "acc_norm": 0.5743016759776536,
        "acc_norm_stderr": 0.01653682964899712
    },
    "harness|hendrycksTest-nutrition|5": {
        "acc": 0.7549019607843137,
        "acc_stderr": 0.024630048979824768,
        "acc_norm": 0.7549019607843137,
        "acc_norm_stderr": 0.024630048979824768
    },
    "harness|hendrycksTest-philosophy|5": {
        "acc": 0.7813504823151125,
        "acc_stderr": 0.02347558141786111,
        "acc_norm": 0.7813504823151125,
        "acc_norm_stderr": 0.02347558141786111
    },
    "harness|hendrycksTest-prehistory|5": {
        "acc": 0.8333333333333334,
        "acc_stderr": 0.02073635840806,
        "acc_norm": 0.8333333333333334,
        "acc_norm_stderr": 0.02073635840806
    },
    "harness|hendrycksTest-professional_accounting|5": {
        "acc": 0.5709219858156028,
        "acc_stderr": 0.029525914302558562,
        "acc_norm": 0.5709219858156028,
        "acc_norm_stderr": 0.029525914302558562
    },
    "harness|hendrycksTest-professional_law|5": {
        "acc": 0.560625814863103,
        "acc_stderr": 0.012676014778580215,
        "acc_norm": 0.560625814863103,
        "acc_norm_stderr": 0.012676014778580215
    },
    "harness|hendrycksTest-professional_medicine|5": {
        "acc": 0.7169117647058824,
        "acc_stderr": 0.02736586113151381,
        "acc_norm": 0.7169117647058824,
        "acc_norm_stderr": 0.02736586113151381
    },
    "harness|hendrycksTest-professional_psychology|5": {
        "acc": 0.7696078431372549,
        "acc_stderr": 0.017035229258034034,
        "acc_norm": 0.7696078431372549,
        "acc_norm_stderr": 0.017035229258034034
    },
    "harness|hendrycksTest-public_relations|5": {
        "acc": 0.7363636363636363,
        "acc_stderr": 0.04220224692971987,
        "acc_norm": 0.7363636363636363,
        "acc_norm_stderr": 0.04220224692971987
    },
    "harness|hendrycksTest-security_studies|5": {
        "acc": 0.8122448979591836,
        "acc_stderr": 0.025000256039546188,
        "acc_norm": 0.8122448979591836,
        "acc_norm_stderr": 0.025000256039546188
    },
    "harness|hendrycksTest-sociology|5": {
        "acc": 0.9054726368159204,
        "acc_stderr": 0.020687186951534094,
        "acc_norm": 0.9054726368159204,
        "acc_norm_stderr": 0.020687186951534094
    },
    "harness|hendrycksTest-us_foreign_policy|5": {
        "acc": 0.92,
        "acc_stderr": 0.0272659924344291,
        "acc_norm": 0.92,
        "acc_norm_stderr": 0.0272659924344291
    },
    "harness|hendrycksTest-virology|5": {
        "acc": 0.5180722891566265,
        "acc_stderr": 0.03889951252827216,
        "acc_norm": 0.5180722891566265,
        "acc_norm_stderr": 0.03889951252827216
    },
    "harness|hendrycksTest-world_religions|5": {
        "acc": 0.8888888888888888,
        "acc_stderr": 0.02410338420207286,
        "acc_norm": 0.8888888888888888,
        "acc_norm_stderr": 0.02410338420207286
    },
    "harness|truthfulqa:mc|0": {
        "mc1": 0.423500611995104,
        "mc1_stderr": 0.017297421448534727,
        "mc2": 0.6069766843815353,
        "mc2_stderr": 0.01472043739247153
    },
    "harness|winogrande|5": {
        "acc": 0.8287292817679558,
        "acc_stderr": 0.010588417294962526
    },
    "harness|gsm8k|5": {
        "acc": 0.6057619408642911,
        "acc_stderr": 0.013460852357095675
    }
}
```
## Dataset Details

### Dataset Description

<!-- Provide a longer summary of what this dataset is. -->

- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]

### Dataset Sources [optional]

<!-- Provide the basic links for the dataset. -->

- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]

## Uses

<!-- Address questions around how the dataset is intended to be used. -->

### Direct Use

<!-- This section describes suitable use cases for the dataset. -->

[More Information Needed]

### Out-of-Scope Use

<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->

[More Information Needed]

## Dataset Structure

<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->

[More Information Needed]

## Dataset Creation

### Curation Rationale

<!-- Motivation for the creation of this dataset. -->

[More Information Needed]

### Source Data

<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->

#### Data Collection and Processing

<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->

[More Information Needed]

#### Who are the source data producers?

<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->

[More Information Needed]

### Annotations [optional]

<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->

#### Annotation process

<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->

[More Information Needed]

#### Who are the annotators?

<!-- This section describes the people or systems who created the annotations. -->

[More Information Needed]

#### Personal and Sensitive Information

<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->

[More Information Needed]

## Bias, Risks, and Limitations

<!-- This section is meant to convey both technical and sociotechnical limitations. -->

[More Information Needed]

### Recommendations

<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->

Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.

## Citation [optional]

<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->

**BibTeX:**

[More Information Needed]

**APA:**

[More Information Needed]

## Glossary [optional]

<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->

[More Information Needed]

## More Information [optional]

[More Information Needed]

## Dataset Card Authors [optional]

[More Information Needed]

## Dataset Card Contact

[More Information Needed]
open-llm-leaderboard/details_ChuckMcSneed__SMaxxxer-v1-70b
[ "region:us" ]
2024-02-02T06:14:40+00:00
{"pretty_name": "Evaluation run of ChuckMcSneed/SMaxxxer-v1-70b", "dataset_summary": "Dataset automatically created during the evaluation run of model [ChuckMcSneed/SMaxxxer-v1-70b](https://huggingface.co/ChuckMcSneed/SMaxxxer-v1-70b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_ChuckMcSneed__SMaxxxer-v1-70b\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-02T06:12:15.180411](https://huggingface.co/datasets/open-llm-leaderboard/details_ChuckMcSneed__SMaxxxer-v1-70b/blob/main/results_2024-02-02T06-12-15.180411.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.7046050402424828,\n \"acc_stderr\": 0.03004645057227282,\n \"acc_norm\": 0.7084589909997574,\n \"acc_norm_stderr\": 0.03062387199604588,\n \"mc1\": 0.423500611995104,\n \"mc1_stderr\": 0.017297421448534727,\n \"mc2\": 0.6069766843815353,\n \"mc2_stderr\": 0.01472043739247153\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.659556313993174,\n \"acc_stderr\": 0.013847460518892976,\n \"acc_norm\": 0.7064846416382252,\n \"acc_norm_stderr\": 0.013307250444941113\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6744672376020713,\n \"acc_stderr\": 0.004676159299105418,\n \"acc_norm\": 0.8802031467835093,\n \"acc_norm_stderr\": 0.003240601883180498\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695236,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695236\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6518518518518519,\n \"acc_stderr\": 0.041153246103369526,\n \"acc_norm\": 0.6518518518518519,\n \"acc_norm_stderr\": 0.041153246103369526\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.8289473684210527,\n \"acc_stderr\": 0.030643607071677084,\n \"acc_norm\": 0.8289473684210527,\n \"acc_norm_stderr\": 0.030643607071677084\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.76,\n \"acc_stderr\": 0.04292346959909283,\n \"acc_norm\": 0.76,\n \"acc_norm_stderr\": 0.04292346959909283\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7358490566037735,\n \"acc_stderr\": 0.027134291628741702,\n \"acc_norm\": 0.7358490566037735,\n \"acc_norm_stderr\": 0.027134291628741702\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.8333333333333334,\n \"acc_stderr\": 0.031164899666948617,\n \"acc_norm\": 0.8333333333333334,\n \"acc_norm_stderr\": 0.031164899666948617\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.45,\n 
\"acc_stderr\": 0.04999999999999999,\n \"acc_norm\": 0.45,\n \"acc_norm_stderr\": 0.04999999999999999\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.59,\n \"acc_stderr\": 0.04943110704237102,\n \"acc_norm\": 0.59,\n \"acc_norm_stderr\": 0.04943110704237102\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.39,\n \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\": 0.39,\n \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6763005780346821,\n \"acc_stderr\": 0.0356760379963917,\n \"acc_norm\": 0.6763005780346821,\n \"acc_norm_stderr\": 0.0356760379963917\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4019607843137255,\n \"acc_stderr\": 0.04878608714466996,\n \"acc_norm\": 0.4019607843137255,\n \"acc_norm_stderr\": 0.04878608714466996\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.78,\n \"acc_stderr\": 0.04163331998932262,\n \"acc_norm\": 0.78,\n \"acc_norm_stderr\": 0.04163331998932262\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.6893617021276596,\n \"acc_stderr\": 0.03025123757921317,\n \"acc_norm\": 0.6893617021276596,\n \"acc_norm_stderr\": 0.03025123757921317\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.4298245614035088,\n \"acc_stderr\": 0.046570472605949625,\n \"acc_norm\": 0.4298245614035088,\n \"acc_norm_stderr\": 0.046570472605949625\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.6137931034482759,\n \"acc_stderr\": 0.04057324734419036,\n \"acc_norm\": 0.6137931034482759,\n \"acc_norm_stderr\": 0.04057324734419036\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.47354497354497355,\n \"acc_stderr\": 0.02571523981134676,\n \"acc_norm\": 0.47354497354497355,\n \"acc_norm_stderr\": 0.02571523981134676\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.48412698412698413,\n \"acc_stderr\": 0.04469881854072606,\n \"acc_norm\": 0.48412698412698413,\n \"acc_norm_stderr\": 0.04469881854072606\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.52,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.52,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.8193548387096774,\n \"acc_stderr\": 0.021886178567172534,\n \"acc_norm\": 0.8193548387096774,\n \"acc_norm_stderr\": 0.021886178567172534\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5714285714285714,\n \"acc_stderr\": 0.034819048444388045,\n \"acc_norm\": 0.5714285714285714,\n \"acc_norm_stderr\": 0.034819048444388045\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.8,\n \"acc_stderr\": 0.04020151261036846,\n \"acc_norm\": 0.8,\n \"acc_norm_stderr\": 0.04020151261036846\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.8363636363636363,\n \"acc_stderr\": 0.02888787239548795,\n \"acc_norm\": 0.8363636363636363,\n \"acc_norm_stderr\": 0.02888787239548795\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8888888888888888,\n \"acc_stderr\": 0.02239078763821677,\n \"acc_norm\": 0.8888888888888888,\n \"acc_norm_stderr\": 0.02239078763821677\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9378238341968912,\n \"acc_stderr\": 0.017426974154240524,\n \"acc_norm\": 0.9378238341968912,\n \"acc_norm_stderr\": 0.017426974154240524\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.7205128205128205,\n \"acc_stderr\": 0.022752388839776823,\n \"acc_norm\": 0.7205128205128205,\n \"acc_norm_stderr\": 0.022752388839776823\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3111111111111111,\n \"acc_stderr\": 0.028226446749683522,\n \"acc_norm\": 0.3111111111111111,\n \"acc_norm_stderr\": 0.028226446749683522\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.7689075630252101,\n \"acc_stderr\": 0.027381406927868886,\n \"acc_norm\": 0.7689075630252101,\n \"acc_norm_stderr\": 0.027381406927868886\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.4900662251655629,\n \"acc_stderr\": 0.04081677107248436,\n \"acc_norm\": 0.4900662251655629,\n \"acc_norm_stderr\": 0.04081677107248436\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8935779816513761,\n \"acc_stderr\": 0.013221554674594372,\n \"acc_norm\": 0.8935779816513761,\n \"acc_norm_stderr\": 0.013221554674594372\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5833333333333334,\n \"acc_stderr\": 0.03362277436608043,\n \"acc_norm\": 0.5833333333333334,\n \"acc_norm_stderr\": 0.03362277436608043\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.9264705882352942,\n \"acc_stderr\": 0.018318855850089678,\n \"acc_norm\": 0.9264705882352942,\n \"acc_norm_stderr\": 0.018318855850089678\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8987341772151899,\n \"acc_stderr\": 0.019637720526065494,\n \"acc_norm\": 0.8987341772151899,\n \"acc_norm_stderr\": 0.019637720526065494\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.8026905829596412,\n \"acc_stderr\": 0.02670985334496796,\n \"acc_norm\": 0.8026905829596412,\n \"acc_norm_stderr\": 0.02670985334496796\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.8854961832061069,\n \"acc_stderr\": 0.027927473753597446,\n \"acc_norm\": 0.8854961832061069,\n \"acc_norm_stderr\": 0.027927473753597446\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.859504132231405,\n \"acc_stderr\": 0.03172233426002158,\n \"acc_norm\": 0.859504132231405,\n \"acc_norm_stderr\": 0.03172233426002158\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8240740740740741,\n \"acc_stderr\": 0.036809181416738807,\n \"acc_norm\": 0.8240740740740741,\n \"acc_norm_stderr\": 0.036809181416738807\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.8466257668711656,\n \"acc_stderr\": 0.028311601441438603,\n \"acc_norm\": 0.8466257668711656,\n \"acc_norm_stderr\": 0.028311601441438603\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.48214285714285715,\n \"acc_stderr\": 0.047427623612430116,\n \"acc_norm\": 0.48214285714285715,\n \"acc_norm_stderr\": 0.047427623612430116\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8252427184466019,\n \"acc_stderr\": 0.037601780060266196,\n \"acc_norm\": 0.8252427184466019,\n \"acc_norm_stderr\": 0.037601780060266196\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8846153846153846,\n \"acc_stderr\": 0.02093019318517933,\n \"acc_norm\": 0.8846153846153846,\n \"acc_norm_stderr\": 0.02093019318517933\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.8671775223499362,\n \"acc_stderr\": 0.012136303209884566,\n \"acc_norm\": 0.8671775223499362,\n \"acc_norm_stderr\": 0.012136303209884566\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.8063583815028902,\n \"acc_stderr\": 0.021274230317515568,\n \"acc_norm\": 0.8063583815028902,\n \"acc_norm_stderr\": 0.021274230317515568\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.5743016759776536,\n \"acc_stderr\": 0.01653682964899712,\n \"acc_norm\": 0.5743016759776536,\n \"acc_norm_stderr\": 0.01653682964899712\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7549019607843137,\n \"acc_stderr\": 0.024630048979824768,\n \"acc_norm\": 0.7549019607843137,\n \"acc_norm_stderr\": 0.024630048979824768\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7813504823151125,\n \"acc_stderr\": 0.02347558141786111,\n \"acc_norm\": 0.7813504823151125,\n \"acc_norm_stderr\": 0.02347558141786111\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.8333333333333334,\n \"acc_stderr\": 0.02073635840806,\n \"acc_norm\": 0.8333333333333334,\n \"acc_norm_stderr\": 0.02073635840806\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.5709219858156028,\n \"acc_stderr\": 0.029525914302558562,\n \"acc_norm\": 0.5709219858156028,\n \"acc_norm_stderr\": 0.029525914302558562\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.560625814863103,\n \"acc_stderr\": 0.012676014778580215,\n \"acc_norm\": 0.560625814863103,\n \"acc_norm_stderr\": 0.012676014778580215\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.7169117647058824,\n \"acc_stderr\": 0.02736586113151381,\n \"acc_norm\": 0.7169117647058824,\n \"acc_norm_stderr\": 0.02736586113151381\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.7696078431372549,\n \"acc_stderr\": 0.017035229258034034,\n \"acc_norm\": 0.7696078431372549,\n \"acc_norm_stderr\": 0.017035229258034034\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.7363636363636363,\n \"acc_stderr\": 0.04220224692971987,\n \"acc_norm\": 0.7363636363636363,\n \"acc_norm_stderr\": 0.04220224692971987\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.8122448979591836,\n \"acc_stderr\": 0.025000256039546188,\n \"acc_norm\": 0.8122448979591836,\n \"acc_norm_stderr\": 0.025000256039546188\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.9054726368159204,\n \"acc_stderr\": 0.020687186951534094,\n \"acc_norm\": 0.9054726368159204,\n \"acc_norm_stderr\": 0.020687186951534094\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.92,\n \"acc_stderr\": 0.0272659924344291,\n \"acc_norm\": 0.92,\n \"acc_norm_stderr\": 0.0272659924344291\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5180722891566265,\n \"acc_stderr\": 0.03889951252827216,\n \"acc_norm\": 0.5180722891566265,\n \"acc_norm_stderr\": 0.03889951252827216\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8888888888888888,\n \"acc_stderr\": 0.02410338420207286,\n \"acc_norm\": 0.8888888888888888,\n \"acc_norm_stderr\": 0.02410338420207286\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.423500611995104,\n \"mc1_stderr\": 0.017297421448534727,\n \"mc2\": 0.6069766843815353,\n \"mc2_stderr\": 0.01472043739247153\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8287292817679558,\n \"acc_stderr\": 0.010588417294962526\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.6057619408642911,\n \"acc_stderr\": 0.013460852357095675\n }\n}\n```", 
"repo_url": "https://huggingface.co/ChuckMcSneed/SMaxxxer-v1-70b", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_02T06_12_15.180411", "path": ["**/details_harness|arc:challenge|25_2024-02-02T06-12-15.180411.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-02T06-12-15.180411.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_02T06_12_15.180411", "path": ["**/details_harness|gsm8k|5_2024-02-02T06-12-15.180411.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-02T06-12-15.180411.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_02T06_12_15.180411", "path": ["**/details_harness|hellaswag|10_2024-02-02T06-12-15.180411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-02T06-12-15.180411.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_02T06_12_15.180411", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T06-12-15.180411.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T06-12-15.180411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T06-12-15.180411.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T06-12-15.180411.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T06-12-15.180411.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T06-12-15.180411.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_02T06_12_15.180411", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T06-12-15.180411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T06-12-15.180411.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_02T06_12_15.180411", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T06-12-15.180411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T06-12-15.180411.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_02T06_12_15.180411", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T06-12-15.180411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T06-12-15.180411.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_02T06_12_15.180411", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T06-12-15.180411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T06-12-15.180411.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_02T06_12_15.180411", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T06-12-15.180411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T06-12-15.180411.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_02T06_12_15.180411", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T06-12-15.180411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T06-12-15.180411.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_02T06_12_15.180411", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T06-12-15.180411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T06-12-15.180411.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_02_02T06_12_15.180411", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T06-12-15.180411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T06-12-15.180411.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_02T06_12_15.180411", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T06-12-15.180411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T06-12-15.180411.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_02T06_12_15.180411", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T06-12-15.180411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T06-12-15.180411.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_02T06_12_15.180411", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T06-12-15.180411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T06-12-15.180411.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_02T06_12_15.180411", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T06-12-15.180411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T06-12-15.180411.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_02T06_12_15.180411", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T06-12-15.180411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T06-12-15.180411.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_02T06_12_15.180411", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T06-12-15.180411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T06-12-15.180411.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_02T06_12_15.180411", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T06-12-15.180411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T06-12-15.180411.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_02T06_12_15.180411", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T06-12-15.180411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T06-12-15.180411.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_02T06_12_15.180411", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T06-12-15.180411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T06-12-15.180411.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_02T06_12_15.180411", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T06-12-15.180411.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T06-12-15.180411.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_02T06_12_15.180411", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T06-12-15.180411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T06-12-15.180411.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_02T06_12_15.180411", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T06-12-15.180411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T06-12-15.180411.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_02T06_12_15.180411", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T06-12-15.180411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T06-12-15.180411.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_02T06_12_15.180411", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T06-12-15.180411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T06-12-15.180411.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_02T06_12_15.180411", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T06-12-15.180411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T06-12-15.180411.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_02T06_12_15.180411", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T06-12-15.180411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T06-12-15.180411.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_02T06_12_15.180411", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T06-12-15.180411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T06-12-15.180411.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_02T06_12_15.180411", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T06-12-15.180411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T06-12-15.180411.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_02T06_12_15.180411", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T06-12-15.180411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T06-12-15.180411.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_02T06_12_15.180411", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T06-12-15.180411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T06-12-15.180411.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_02T06_12_15.180411", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T06-12-15.180411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T06-12-15.180411.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_02T06_12_15.180411", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T06-12-15.180411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T06-12-15.180411.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_02T06_12_15.180411", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T06-12-15.180411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T06-12-15.180411.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_02T06_12_15.180411", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T06-12-15.180411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T06-12-15.180411.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_02T06_12_15.180411", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T06-12-15.180411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T06-12-15.180411.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_02T06_12_15.180411", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T06-12-15.180411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T06-12-15.180411.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_02T06_12_15.180411", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T06-12-15.180411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T06-12-15.180411.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_02T06_12_15.180411", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T06-12-15.180411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T06-12-15.180411.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_02T06_12_15.180411", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T06-12-15.180411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T06-12-15.180411.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_02_02T06_12_15.180411", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T06-12-15.180411.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T06-12-15.180411.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_02T06_12_15.180411", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T06-12-15.180411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T06-12-15.180411.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_02T06_12_15.180411", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T06-12-15.180411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T06-12-15.180411.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_02T06_12_15.180411", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T06-12-15.180411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T06-12-15.180411.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_02T06_12_15.180411", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T06-12-15.180411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T06-12-15.180411.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_02T06_12_15.180411", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T06-12-15.180411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T06-12-15.180411.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_02T06_12_15.180411", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T06-12-15.180411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T06-12-15.180411.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_02T06_12_15.180411", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T06-12-15.180411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T06-12-15.180411.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_02T06_12_15.180411", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T06-12-15.180411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T06-12-15.180411.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_02T06_12_15.180411", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T06-12-15.180411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T06-12-15.180411.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_02T06_12_15.180411", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T06-12-15.180411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T06-12-15.180411.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_02T06_12_15.180411", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T06-12-15.180411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T06-12-15.180411.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_02T06_12_15.180411", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T06-12-15.180411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T06-12-15.180411.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_02T06_12_15.180411", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T06-12-15.180411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T06-12-15.180411.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_02T06_12_15.180411", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T06-12-15.180411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T06-12-15.180411.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_02T06_12_15.180411", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T06-12-15.180411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T06-12-15.180411.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_02T06_12_15.180411", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T06-12-15.180411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T06-12-15.180411.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_02T06_12_15.180411", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T06-12-15.180411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T06-12-15.180411.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_02T06_12_15.180411", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T06-12-15.180411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T06-12-15.180411.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_02T06_12_15.180411", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T06-12-15.180411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T06-12-15.180411.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_02T06_12_15.180411", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T06-12-15.180411.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T06-12-15.180411.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_02T06_12_15.180411", "path": ["**/details_harness|winogrande|5_2024-02-02T06-12-15.180411.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-02-02T06-12-15.180411.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_02_02T06_12_15.180411", "path": ["results_2024-02-02T06-12-15.180411.parquet"]}, {"split": "latest", "path": ["results_2024-02-02T06-12-15.180411.parquet"]}]}]}
2024-02-02T06:15:07+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of ChuckMcSneed/SMaxxxer-v1-70b Dataset automatically created during the evaluation run of model ChuckMcSneed/SMaxxxer-v1-70b on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-02-02T06:12:15.180411 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of ChuckMcSneed/SMaxxxer-v1-70b\n\n\n\nDataset automatically created during the evaluation run of model ChuckMcSneed/SMaxxxer-v1-70b on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T06:12:15.180411(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of ChuckMcSneed/SMaxxxer-v1-70b\n\n\n\nDataset automatically created during the evaluation run of model ChuckMcSneed/SMaxxxer-v1-70b on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T06:12:15.180411(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
954f0f030049234970bd9d1d9802d9de85ab5ae7
# Dataset Card for Evaluation run of codellama/CodeLlama-70b-Instruct-hf

<!-- Provide a quick summary of the dataset. -->

Dataset automatically created during the evaluation run of model [codellama/CodeLlama-70b-Instruct-hf](https://huggingface.co/codellama/CodeLlama-70b-Instruct-hf) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_codellama__CodeLlama-70b-Instruct-hf",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2024-02-02T06:15:21.306042](https://huggingface.co/datasets/open-llm-leaderboard/details_codellama__CodeLlama-70b-Instruct-hf/blob/main/results_2024-02-02T06-15-21.306042.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "acc": 0.5648604524571269,
        "acc_stderr": 0.03399038244243267,
        "acc_norm": 0.5673299922382132,
        "acc_norm_stderr": 0.034688595336715734,
        "mc1": 0.3525091799265606,
        "mc1_stderr": 0.016724646380756544,
        "mc2": 0.5044393244952377,
        "mc2_stderr": 0.015451705191766632
    },
    "harness|arc:challenge|25": {
        "acc": 0.5179180887372014,
        "acc_stderr": 0.014602005585490982,
        "acc_norm": 0.5503412969283277,
        "acc_norm_stderr": 0.014537144444284741
    },
    "harness|hellaswag|10": {
        "acc": 0.5812587134037045,
        "acc_stderr": 0.0049234456278615234,
        "acc_norm": 0.7723561043616809,
        "acc_norm_stderr": 0.00418454567538735
    },
    "harness|hendrycksTest-abstract_algebra|5": {
        "acc": 0.32,
        "acc_stderr": 0.046882617226215034,
        "acc_norm": 0.32,
        "acc_norm_stderr": 0.046882617226215034
    },
    "harness|hendrycksTest-anatomy|5": {
        "acc": 0.5259259259259259,
        "acc_stderr": 0.04313531696750575,
        "acc_norm": 0.5259259259259259,
        "acc_norm_stderr": 0.04313531696750575
    },
    "harness|hendrycksTest-astronomy|5": {
        "acc": 0.5723684210526315,
        "acc_stderr": 0.04026097083296562,
        "acc_norm": 0.5723684210526315,
        "acc_norm_stderr": 0.04026097083296562
    },
    "harness|hendrycksTest-business_ethics|5": {
        "acc": 0.6,
        "acc_stderr": 0.049236596391733084,
        "acc_norm": 0.6,
        "acc_norm_stderr": 0.049236596391733084
    },
    "harness|hendrycksTest-clinical_knowledge|5": {
        "acc": 0.539622641509434,
        "acc_stderr": 0.030676096599389177,
        "acc_norm": 0.539622641509434,
        "acc_norm_stderr": 0.030676096599389177
    },
    "harness|hendrycksTest-college_biology|5": {
        "acc": 0.5208333333333334,
        "acc_stderr": 0.04177578950739993,
        "acc_norm": 0.5208333333333334,
        "acc_norm_stderr": 0.04177578950739993
    },
    "harness|hendrycksTest-college_chemistry|5": {
        "acc": 0.36,
        "acc_stderr": 0.04824181513244218,
        "acc_norm": 0.36,
        "acc_norm_stderr": 0.04824181513244218
    },
    "harness|hendrycksTest-college_computer_science|5": {
        "acc": 0.5,
        "acc_stderr": 0.050251890762960605,
        "acc_norm": 0.5,
        "acc_norm_stderr": 0.050251890762960605
    },
    "harness|hendrycksTest-college_mathematics|5": {
        "acc": 0.42,
        "acc_stderr": 0.049604496374885836,
        "acc_norm": 0.42,
        "acc_norm_stderr": 0.049604496374885836
    },
    "harness|hendrycksTest-college_medicine|5": {
        "acc": 0.48554913294797686,
        "acc_stderr": 0.03810871630454764,
        "acc_norm": 0.48554913294797686,
        "acc_norm_stderr": 0.03810871630454764
    },
    "harness|hendrycksTest-college_physics|5": {
        "acc": 0.30392156862745096,
        "acc_stderr": 0.045766654032077636,
        "acc_norm": 0.30392156862745096,
        "acc_norm_stderr": 0.045766654032077636
    },
    "harness|hendrycksTest-computer_security|5": {
        "acc": 0.71,
        "acc_stderr": 0.045604802157206845,
        "acc_norm": 0.71,
        "acc_norm_stderr": 0.045604802157206845
    },
    "harness|hendrycksTest-conceptual_physics|5": {
        "acc": 0.5234042553191489,
        "acc_stderr": 0.03265019475033582,
        "acc_norm": 0.5234042553191489,
        "acc_norm_stderr": 0.03265019475033582
    },
    "harness|hendrycksTest-econometrics|5": {
        "acc": 0.42105263157894735,
        "acc_stderr": 0.046446020912223177,
        "acc_norm": 0.42105263157894735,
        "acc_norm_stderr": 0.046446020912223177
    },
    "harness|hendrycksTest-electrical_engineering|5": {
        "acc": 0.5379310344827586,
        "acc_stderr": 0.041546596717075474,
        "acc_norm": 0.5379310344827586,
        "acc_norm_stderr": 0.041546596717075474
    },
    "harness|hendrycksTest-elementary_mathematics|5": {
        "acc": 0.42857142857142855,
        "acc_stderr": 0.025487187147859375,
        "acc_norm": 0.42857142857142855,
        "acc_norm_stderr": 0.025487187147859375
    },
    "harness|hendrycksTest-formal_logic|5": {
        "acc": 0.4444444444444444,
        "acc_stderr": 0.044444444444444495,
        "acc_norm": 0.4444444444444444,
        "acc_norm_stderr": 0.044444444444444495
    },
    "harness|hendrycksTest-global_facts|5": {
        "acc": 0.34,
        "acc_stderr": 0.04760952285695235,
        "acc_norm": 0.34,
        "acc_norm_stderr": 0.04760952285695235
    },
    "harness|hendrycksTest-high_school_biology|5": {
        "acc": 0.635483870967742,
        "acc_stderr": 0.02737987122994325,
        "acc_norm": 0.635483870967742,
        "acc_norm_stderr": 0.02737987122994325
    },
    "harness|hendrycksTest-high_school_chemistry|5": {
        "acc": 0.39408866995073893,
        "acc_stderr": 0.034381579670365446,
        "acc_norm": 0.39408866995073893,
        "acc_norm_stderr": 0.034381579670365446
    },
    "harness|hendrycksTest-high_school_computer_science|5": {
        "acc": 0.73,
        "acc_stderr": 0.044619604333847394,
        "acc_norm": 0.73,
        "acc_norm_stderr": 0.044619604333847394
    },
    "harness|hendrycksTest-high_school_european_history|5": {
        "acc": 0.7333333333333333,
        "acc_stderr": 0.03453131801885417,
        "acc_norm": 0.7333333333333333,
        "acc_norm_stderr": 0.03453131801885417
    },
    "harness|hendrycksTest-high_school_geography|5": {
        "acc": 0.702020202020202,
        "acc_stderr": 0.03258630383836556,
        "acc_norm": 0.702020202020202,
        "acc_norm_stderr": 0.03258630383836556
    },
    "harness|hendrycksTest-high_school_government_and_politics|5": {
        "acc": 0.7875647668393783,
        "acc_stderr": 0.02951928261681723,
        "acc_norm": 0.7875647668393783,
        "acc_norm_stderr": 0.02951928261681723
    },
    "harness|hendrycksTest-high_school_macroeconomics|5": {
        "acc": 0.5230769230769231,
        "acc_stderr": 0.025323990861736232,
        "acc_norm": 0.5230769230769231,
        "acc_norm_stderr": 0.025323990861736232
    },
    "harness|hendrycksTest-high_school_mathematics|5": {
        "acc": 0.362962962962963,
        "acc_stderr": 0.02931820364520686,
        "acc_norm": 0.362962962962963,
        "acc_norm_stderr": 0.02931820364520686
    },
    "harness|hendrycksTest-high_school_microeconomics|5": {
        "acc": 0.5294117647058824,
        "acc_stderr": 0.03242225027115007,
        "acc_norm": 0.5294117647058824,
        "acc_norm_stderr": 0.03242225027115007
    },
    "harness|hendrycksTest-high_school_physics|5": {
        "acc": 0.39072847682119205,
        "acc_stderr": 0.03983798306659806,
        "acc_norm": 0.39072847682119205,
        "acc_norm_stderr": 0.03983798306659806
    },
    "harness|hendrycksTest-high_school_psychology|5": {
        "acc": 0.7504587155963303,
        "acc_stderr": 0.018553897629501624,
        "acc_norm": 0.7504587155963303,
        "acc_norm_stderr": 0.018553897629501624
    },
    "harness|hendrycksTest-high_school_statistics|5": {
        "acc": 0.4305555555555556,
        "acc_stderr": 0.03376922151252335,
        "acc_norm": 0.4305555555555556,
        "acc_norm_stderr": 0.03376922151252335
    },
    "harness|hendrycksTest-high_school_us_history|5": {
        "acc": 0.7303921568627451,
        "acc_stderr": 0.031145570659486782,
        "acc_norm": 0.7303921568627451,
        "acc_norm_stderr": 0.031145570659486782
    },
    "harness|hendrycksTest-high_school_world_history|5": {
        "acc": 0.7510548523206751,
        "acc_stderr": 0.028146970599422644,
        "acc_norm": 0.7510548523206751,
        "acc_norm_stderr": 0.028146970599422644
    },
    "harness|hendrycksTest-human_aging|5": {
        "acc": 0.5964125560538116,
        "acc_stderr": 0.03292802819330314,
        "acc_norm": 0.5964125560538116,
        "acc_norm_stderr": 0.03292802819330314
    },
    "harness|hendrycksTest-human_sexuality|5": {
        "acc": 0.6259541984732825,
        "acc_stderr": 0.042438692422305246,
        "acc_norm": 0.6259541984732825,
        "acc_norm_stderr": 0.042438692422305246
    },
    "harness|hendrycksTest-international_law|5": {
        "acc": 0.7024793388429752,
        "acc_stderr": 0.04173349148083499,
        "acc_norm": 0.7024793388429752,
        "acc_norm_stderr": 0.04173349148083499
    },
    "harness|hendrycksTest-jurisprudence|5": {
        "acc": 0.6851851851851852,
        "acc_stderr": 0.04489931073591312,
        "acc_norm": 0.6851851851851852,
        "acc_norm_stderr": 0.04489931073591312
    },
    "harness|hendrycksTest-logical_fallacies|5": {
        "acc": 0.7177914110429447,
        "acc_stderr": 0.03536117886664742,
        "acc_norm": 0.7177914110429447,
        "acc_norm_stderr": 0.03536117886664742
    },
    "harness|hendrycksTest-machine_learning|5": {
        "acc": 0.42857142857142855,
        "acc_stderr": 0.04697113923010212,
        "acc_norm": 0.42857142857142855,
        "acc_norm_stderr": 0.04697113923010212
    },
    "harness|hendrycksTest-management|5": {
        "acc": 0.7475728155339806,
        "acc_stderr": 0.04301250399690878,
        "acc_norm": 0.7475728155339806,
        "acc_norm_stderr": 0.04301250399690878
    },
    "harness|hendrycksTest-marketing|5": {
        "acc": 0.8247863247863247,
        "acc_stderr": 0.02490443909891823,
        "acc_norm": 0.8247863247863247,
        "acc_norm_stderr": 0.02490443909891823
    },
    "harness|hendrycksTest-medical_genetics|5": {
        "acc": 0.58,
        "acc_stderr": 0.04960449637488583,
        "acc_norm": 0.58,
        "acc_norm_stderr": 0.04960449637488583
    },
    "harness|hendrycksTest-miscellaneous|5": {
        "acc": 0.7100893997445722,
        "acc_stderr": 0.016225017944770978,
        "acc_norm": 0.7100893997445722,
        "acc_norm_stderr": 0.016225017944770978
    },
    "harness|hendrycksTest-moral_disputes|5": {
        "acc": 0.5867052023121387,
        "acc_stderr": 0.026511261369409244,
        "acc_norm": 0.5867052023121387,
        "acc_norm_stderr": 0.026511261369409244
    },
    "harness|hendrycksTest-moral_scenarios|5": {
        "acc": 0.3553072625698324,
        "acc_stderr": 0.01600698993480318,
        "acc_norm": 0.3553072625698324,
        "acc_norm_stderr": 0.01600698993480318
    },
    "harness|hendrycksTest-nutrition|5": {
        "acc": 0.5522875816993464,
        "acc_stderr": 0.02847293847803353,
        "acc_norm": 0.5522875816993464,
        "acc_norm_stderr": 0.02847293847803353
    },
    "harness|hendrycksTest-philosophy|5": {
        "acc": 0.6302250803858521,
        "acc_stderr": 0.027417996705630998,
        "acc_norm": 0.6302250803858521,
        "acc_norm_stderr": 0.027417996705630998
    },
    "harness|hendrycksTest-prehistory|5": {
        "acc": 0.5864197530864198,
        "acc_stderr": 0.027402042040269966,
        "acc_norm": 0.5864197530864198,
        "acc_norm_stderr": 0.027402042040269966
    },
    "harness|hendrycksTest-professional_accounting|5": {
        "acc": 0.45390070921985815,
        "acc_stderr": 0.029700453247291474,
        "acc_norm": 0.45390070921985815,
        "acc_norm_stderr": 0.029700453247291474
    },
    "harness|hendrycksTest-professional_law|5": {
        "acc": 0.41460234680573665,
        "acc_stderr": 0.012582597058908284,
        "acc_norm": 0.41460234680573665,
        "acc_norm_stderr": 0.012582597058908284
    },
    "harness|hendrycksTest-professional_medicine|5": {
        "acc": 0.41911764705882354,
        "acc_stderr": 0.029972807170464626,
        "acc_norm": 0.41911764705882354,
        "acc_norm_stderr": 0.029972807170464626
    },
    "harness|hendrycksTest-professional_psychology|5": {
        "acc": 0.5408496732026143,
        "acc_stderr": 0.020160213617222516,
        "acc_norm": 0.5408496732026143,
        "acc_norm_stderr": 0.020160213617222516
    },
    "harness|hendrycksTest-public_relations|5": {
        "acc": 0.6727272727272727,
        "acc_stderr": 0.0449429086625209,
        "acc_norm": 0.6727272727272727,
        "acc_norm_stderr": 0.0449429086625209
    },
    "harness|hendrycksTest-security_studies|5": {
        "acc": 0.6489795918367347,
        "acc_stderr": 0.03055531675557364,
        "acc_norm": 0.6489795918367347,
        "acc_norm_stderr": 0.03055531675557364
    },
    "harness|hendrycksTest-sociology|5": {
        "acc": 0.7661691542288557,
        "acc_stderr": 0.029929415408348384,
        "acc_norm": 0.7661691542288557,
        "acc_norm_stderr": 0.029929415408348384
    },
    "harness|hendrycksTest-us_foreign_policy|5": {
        "acc": 0.74,
        "acc_stderr": 0.044084400227680794,
        "acc_norm": 0.74,
        "acc_norm_stderr": 0.044084400227680794
    },
    "harness|hendrycksTest-virology|5": {
        "acc": 0.43373493975903615,
        "acc_stderr": 0.03858158940685517,
        "acc_norm": 0.43373493975903615,
        "acc_norm_stderr": 0.03858158940685517
    },
    "harness|hendrycksTest-world_religions|5": {
        "acc": 0.7251461988304093,
        "acc_stderr": 0.03424042924691584,
        "acc_norm": 0.7251461988304093,
        "acc_norm_stderr": 0.03424042924691584
    },
    "harness|truthfulqa:mc|0": {
        "mc1": 0.3525091799265606,
        "mc1_stderr": 0.016724646380756544,
        "mc2": 0.5044393244952377,
        "mc2_stderr": 0.015451705191766632
    },
    "harness|winogrande|5": {
        "acc": 0.745067087608524,
        "acc_stderr": 0.012248806969376422
    },
    "harness|gsm8k|5": {
        "acc": 0.4624715693707354,
        "acc_stderr": 0.013733636059107756
    }
}
```

## Dataset Details

### Dataset Description

<!-- Provide a longer summary of what this dataset is. -->

- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]

### Dataset Sources [optional]

<!-- Provide the basic links for the dataset. -->

- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]

## Uses

<!-- Address questions around how the dataset is intended to be used. -->

### Direct Use

<!-- This section describes suitable use cases for the dataset. -->

[More Information Needed]

### Out-of-Scope Use

<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->

[More Information Needed]

## Dataset Structure

<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->

[More Information Needed]

## Dataset Creation

### Curation Rationale

<!-- Motivation for the creation of this dataset. -->

[More Information Needed]

### Source Data

<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->

#### Data Collection and Processing

<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->

[More Information Needed]

#### Who are the source data producers?

<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->

[More Information Needed]

### Annotations [optional]

<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->

#### Annotation process

<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->

[More Information Needed]

#### Who are the annotators?

<!-- This section describes the people or systems who created the annotations. -->

[More Information Needed]

#### Personal and Sensitive Information

<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->

[More Information Needed]

## Bias, Risks, and Limitations

<!-- This section is meant to convey both technical and sociotechnical limitations. -->

[More Information Needed]

### Recommendations

<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->

Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.

## Citation [optional]

<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->

**BibTeX:**

[More Information Needed]

**APA:**

[More Information Needed]

## Glossary [optional]

<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->

[More Information Needed]

## More Information [optional]

[More Information Needed]

## Dataset Card Authors [optional]

[More Information Needed]

## Dataset Card Contact

[More Information Needed]
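Not part of the card itself: a short sketch of how the per-task dictionary in "Latest results" above can be aggregated, e.g. macro-averaging `acc_norm` over the `hendrycksTest` (MMLU) subtasks. The three values below are copied from the entries shown above and stand in for the full set of ~57 subtasks.

```python
from statistics import mean

# Shape matches the "Latest results" block above; only three of the
# hendrycksTest entries are reproduced here for illustration.
results = {
    "harness|hendrycksTest-abstract_algebra|5": {"acc_norm": 0.32},
    "harness|hendrycksTest-anatomy|5": {"acc_norm": 0.5259259259259259},
    "harness|hendrycksTest-astronomy|5": {"acc_norm": 0.5723684210526315},
}

mmlu_scores = [
    v["acc_norm"] for k, v in results.items()
    if k.startswith("harness|hendrycksTest-")
]
print(f"macro-averaged MMLU acc_norm over {len(mmlu_scores)} subtasks: "
      f"{mean(mmlu_scores):.4f}")
```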
open-llm-leaderboard/details_codellama__CodeLlama-70b-Instruct-hf
[ "region:us" ]
2024-02-02T06:17:45+00:00
{"pretty_name": "Evaluation run of codellama/CodeLlama-70b-Instruct-hf", "dataset_summary": "Dataset automatically created during the evaluation run of model [codellama/CodeLlama-70b-Instruct-hf](https://huggingface.co/codellama/CodeLlama-70b-Instruct-hf) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_codellama__CodeLlama-70b-Instruct-hf\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-02T06:15:21.306042](https://huggingface.co/datasets/open-llm-leaderboard/details_codellama__CodeLlama-70b-Instruct-hf/blob/main/results_2024-02-02T06-15-21.306042.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5648604524571269,\n \"acc_stderr\": 0.03399038244243267,\n \"acc_norm\": 0.5673299922382132,\n \"acc_norm_stderr\": 0.034688595336715734,\n \"mc1\": 0.3525091799265606,\n \"mc1_stderr\": 0.016724646380756544,\n \"mc2\": 0.5044393244952377,\n \"mc2_stderr\": 0.015451705191766632\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5179180887372014,\n \"acc_stderr\": 0.014602005585490982,\n \"acc_norm\": 0.5503412969283277,\n \"acc_norm_stderr\": 0.014537144444284741\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.5812587134037045,\n \"acc_stderr\": 0.0049234456278615234,\n \"acc_norm\": 0.7723561043616809,\n \"acc_norm_stderr\": 0.00418454567538735\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.046882617226215034,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.046882617226215034\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5259259259259259,\n \"acc_stderr\": 0.04313531696750575,\n \"acc_norm\": 0.5259259259259259,\n \"acc_norm_stderr\": 0.04313531696750575\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.5723684210526315,\n \"acc_stderr\": 0.04026097083296562,\n \"acc_norm\": 0.5723684210526315,\n \"acc_norm_stderr\": 0.04026097083296562\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.6,\n \"acc_stderr\": 0.049236596391733084,\n \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.049236596391733084\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.539622641509434,\n \"acc_stderr\": 0.030676096599389177,\n \"acc_norm\": 0.539622641509434,\n \"acc_norm_stderr\": 0.030676096599389177\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.5208333333333334,\n \"acc_stderr\": 0.04177578950739993,\n \"acc_norm\": 0.5208333333333334,\n \"acc_norm_stderr\": 0.04177578950739993\n },\n 
\"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.36,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.050251890762960605,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.050251890762960605\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.42,\n \"acc_stderr\": 0.049604496374885836,\n \"acc_norm\": 0.42,\n \"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.48554913294797686,\n \"acc_stderr\": 0.03810871630454764,\n \"acc_norm\": 0.48554913294797686,\n \"acc_norm_stderr\": 0.03810871630454764\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.30392156862745096,\n \"acc_stderr\": 0.045766654032077636,\n \"acc_norm\": 0.30392156862745096,\n \"acc_norm_stderr\": 0.045766654032077636\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5234042553191489,\n \"acc_stderr\": 0.03265019475033582,\n \"acc_norm\": 0.5234042553191489,\n \"acc_norm_stderr\": 0.03265019475033582\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.42105263157894735,\n \"acc_stderr\": 0.046446020912223177,\n \"acc_norm\": 0.42105263157894735,\n \"acc_norm_stderr\": 0.046446020912223177\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5379310344827586,\n \"acc_stderr\": 0.041546596717075474,\n \"acc_norm\": 0.5379310344827586,\n \"acc_norm_stderr\": 0.041546596717075474\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.42857142857142855,\n \"acc_stderr\": 0.025487187147859375,\n \"acc_norm\": 0.42857142857142855,\n \"acc_norm_stderr\": 0.025487187147859375\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4444444444444444,\n \"acc_stderr\": 0.044444444444444495,\n \"acc_norm\": 0.4444444444444444,\n \"acc_norm_stderr\": 0.044444444444444495\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.635483870967742,\n \"acc_stderr\": 0.02737987122994325,\n \"acc_norm\": 0.635483870967742,\n \"acc_norm_stderr\": 0.02737987122994325\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.39408866995073893,\n \"acc_stderr\": 0.034381579670365446,\n \"acc_norm\": 0.39408866995073893,\n \"acc_norm_stderr\": 0.034381579670365446\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.73,\n \"acc_stderr\": 0.044619604333847394,\n \"acc_norm\": 0.73,\n \"acc_norm_stderr\": 0.044619604333847394\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7333333333333333,\n \"acc_stderr\": 0.03453131801885417,\n \"acc_norm\": 0.7333333333333333,\n \"acc_norm_stderr\": 0.03453131801885417\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.702020202020202,\n \"acc_stderr\": 0.03258630383836556,\n \"acc_norm\": 0.702020202020202,\n \"acc_norm_stderr\": 0.03258630383836556\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.7875647668393783,\n \"acc_stderr\": 0.02951928261681723,\n \"acc_norm\": 0.7875647668393783,\n 
\"acc_norm_stderr\": 0.02951928261681723\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.5230769230769231,\n \"acc_stderr\": 0.025323990861736232,\n \"acc_norm\": 0.5230769230769231,\n \"acc_norm_stderr\": 0.025323990861736232\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.362962962962963,\n \"acc_stderr\": 0.02931820364520686,\n \"acc_norm\": 0.362962962962963,\n \"acc_norm_stderr\": 0.02931820364520686\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.5294117647058824,\n \"acc_stderr\": 0.03242225027115007,\n \"acc_norm\": 0.5294117647058824,\n \"acc_norm_stderr\": 0.03242225027115007\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.39072847682119205,\n \"acc_stderr\": 0.03983798306659806,\n \"acc_norm\": 0.39072847682119205,\n \"acc_norm_stderr\": 0.03983798306659806\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.7504587155963303,\n \"acc_stderr\": 0.018553897629501624,\n \"acc_norm\": 0.7504587155963303,\n \"acc_norm_stderr\": 0.018553897629501624\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.4305555555555556,\n \"acc_stderr\": 0.03376922151252335,\n \"acc_norm\": 0.4305555555555556,\n \"acc_norm_stderr\": 0.03376922151252335\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7303921568627451,\n \"acc_stderr\": 0.031145570659486782,\n \"acc_norm\": 0.7303921568627451,\n \"acc_norm_stderr\": 0.031145570659486782\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7510548523206751,\n \"acc_stderr\": 0.028146970599422644,\n \"acc_norm\": 0.7510548523206751,\n \"acc_norm_stderr\": 0.028146970599422644\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.5964125560538116,\n \"acc_stderr\": 0.03292802819330314,\n \"acc_norm\": 0.5964125560538116,\n \"acc_norm_stderr\": 0.03292802819330314\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.6259541984732825,\n \"acc_stderr\": 0.042438692422305246,\n \"acc_norm\": 0.6259541984732825,\n \"acc_norm_stderr\": 0.042438692422305246\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7024793388429752,\n \"acc_stderr\": 0.04173349148083499,\n \"acc_norm\": 0.7024793388429752,\n \"acc_norm_stderr\": 0.04173349148083499\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.6851851851851852,\n \"acc_stderr\": 0.04489931073591312,\n \"acc_norm\": 0.6851851851851852,\n \"acc_norm_stderr\": 0.04489931073591312\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7177914110429447,\n \"acc_stderr\": 0.03536117886664742,\n \"acc_norm\": 0.7177914110429447,\n \"acc_norm_stderr\": 0.03536117886664742\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.42857142857142855,\n \"acc_stderr\": 0.04697113923010212,\n \"acc_norm\": 0.42857142857142855,\n \"acc_norm_stderr\": 0.04697113923010212\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7475728155339806,\n \"acc_stderr\": 0.04301250399690878,\n \"acc_norm\": 0.7475728155339806,\n \"acc_norm_stderr\": 0.04301250399690878\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8247863247863247,\n \"acc_stderr\": 0.02490443909891823,\n \"acc_norm\": 0.8247863247863247,\n \"acc_norm_stderr\": 0.02490443909891823\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.58,\n \"acc_stderr\": 0.04960449637488583,\n \"acc_norm\": 0.58,\n \"acc_norm_stderr\": 0.04960449637488583\n },\n 
\"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7100893997445722,\n \"acc_stderr\": 0.016225017944770978,\n \"acc_norm\": 0.7100893997445722,\n \"acc_norm_stderr\": 0.016225017944770978\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.5867052023121387,\n \"acc_stderr\": 0.026511261369409244,\n \"acc_norm\": 0.5867052023121387,\n \"acc_norm_stderr\": 0.026511261369409244\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.3553072625698324,\n \"acc_stderr\": 0.01600698993480318,\n \"acc_norm\": 0.3553072625698324,\n \"acc_norm_stderr\": 0.01600698993480318\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.5522875816993464,\n \"acc_stderr\": 0.02847293847803353,\n \"acc_norm\": 0.5522875816993464,\n \"acc_norm_stderr\": 0.02847293847803353\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6302250803858521,\n \"acc_stderr\": 0.027417996705630998,\n \"acc_norm\": 0.6302250803858521,\n \"acc_norm_stderr\": 0.027417996705630998\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.5864197530864198,\n \"acc_stderr\": 0.027402042040269966,\n \"acc_norm\": 0.5864197530864198,\n \"acc_norm_stderr\": 0.027402042040269966\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.45390070921985815,\n \"acc_stderr\": 0.029700453247291474,\n \"acc_norm\": 0.45390070921985815,\n \"acc_norm_stderr\": 0.029700453247291474\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.41460234680573665,\n \"acc_stderr\": 0.012582597058908284,\n \"acc_norm\": 0.41460234680573665,\n \"acc_norm_stderr\": 0.012582597058908284\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.41911764705882354,\n \"acc_stderr\": 0.029972807170464626,\n \"acc_norm\": 0.41911764705882354,\n \"acc_norm_stderr\": 0.029972807170464626\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.5408496732026143,\n \"acc_stderr\": 0.020160213617222516,\n \"acc_norm\": 0.5408496732026143,\n \"acc_norm_stderr\": 0.020160213617222516\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6727272727272727,\n \"acc_stderr\": 0.0449429086625209,\n \"acc_norm\": 0.6727272727272727,\n \"acc_norm_stderr\": 0.0449429086625209\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.6489795918367347,\n \"acc_stderr\": 0.03055531675557364,\n \"acc_norm\": 0.6489795918367347,\n \"acc_norm_stderr\": 0.03055531675557364\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.7661691542288557,\n \"acc_stderr\": 0.029929415408348384,\n \"acc_norm\": 0.7661691542288557,\n \"acc_norm_stderr\": 0.029929415408348384\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.74,\n \"acc_stderr\": 0.044084400227680794,\n \"acc_norm\": 0.74,\n \"acc_norm_stderr\": 0.044084400227680794\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.43373493975903615,\n \"acc_stderr\": 0.03858158940685517,\n \"acc_norm\": 0.43373493975903615,\n \"acc_norm_stderr\": 0.03858158940685517\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.7251461988304093,\n \"acc_stderr\": 0.03424042924691584,\n \"acc_norm\": 0.7251461988304093,\n \"acc_norm_stderr\": 0.03424042924691584\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.3525091799265606,\n \"mc1_stderr\": 0.016724646380756544,\n \"mc2\": 0.5044393244952377,\n \"mc2_stderr\": 0.015451705191766632\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.745067087608524,\n \"acc_stderr\": 0.012248806969376422\n },\n \"harness|gsm8k|5\": {\n 
\"acc\": 0.4624715693707354,\n \"acc_stderr\": 0.013733636059107756\n }\n}\n```", "repo_url": "https://huggingface.co/codellama/CodeLlama-70b-Instruct-hf", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_02T06_15_21.306042", "path": ["**/details_harness|arc:challenge|25_2024-02-02T06-15-21.306042.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-02T06-15-21.306042.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_02T06_15_21.306042", "path": ["**/details_harness|gsm8k|5_2024-02-02T06-15-21.306042.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-02T06-15-21.306042.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_02T06_15_21.306042", "path": ["**/details_harness|hellaswag|10_2024-02-02T06-15-21.306042.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-02T06-15-21.306042.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_02T06_15_21.306042", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T06-15-21.306042.parquet", 
"**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T06-15-21.306042.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T06-15-21.306042.parquet", 
"**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T06-15-21.306042.parquet", 
"**/details_harness|hendrycksTest-nutrition|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T06-15-21.306042.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T06-15-21.306042.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_02T06_15_21.306042", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T06-15-21.306042.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T06-15-21.306042.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_02T06_15_21.306042", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T06-15-21.306042.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T06-15-21.306042.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_02T06_15_21.306042", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T06-15-21.306042.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T06-15-21.306042.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_02T06_15_21.306042", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T06-15-21.306042.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T06-15-21.306042.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_02T06_15_21.306042", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T06-15-21.306042.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T06-15-21.306042.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_02T06_15_21.306042", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T06-15-21.306042.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T06-15-21.306042.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_02T06_15_21.306042", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T06-15-21.306042.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T06-15-21.306042.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", 
"data_files": [{"split": "2024_02_02T06_15_21.306042", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T06-15-21.306042.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T06-15-21.306042.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_02T06_15_21.306042", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T06-15-21.306042.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T06-15-21.306042.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_02T06_15_21.306042", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T06-15-21.306042.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T06-15-21.306042.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_02T06_15_21.306042", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T06-15-21.306042.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T06-15-21.306042.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_02T06_15_21.306042", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T06-15-21.306042.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T06-15-21.306042.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_02T06_15_21.306042", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T06-15-21.306042.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T06-15-21.306042.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_02T06_15_21.306042", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T06-15-21.306042.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T06-15-21.306042.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_02T06_15_21.306042", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T06-15-21.306042.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T06-15-21.306042.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_02T06_15_21.306042", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T06-15-21.306042.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T06-15-21.306042.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_02T06_15_21.306042", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T06-15-21.306042.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T06-15-21.306042.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_02T06_15_21.306042", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T06-15-21.306042.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T06-15-21.306042.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_02T06_15_21.306042", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T06-15-21.306042.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T06-15-21.306042.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_02T06_15_21.306042", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T06-15-21.306042.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T06-15-21.306042.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_02T06_15_21.306042", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T06-15-21.306042.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T06-15-21.306042.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_02T06_15_21.306042", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T06-15-21.306042.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T06-15-21.306042.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_02T06_15_21.306042", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T06-15-21.306042.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T06-15-21.306042.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_02T06_15_21.306042", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T06-15-21.306042.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T06-15-21.306042.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_02T06_15_21.306042", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T06-15-21.306042.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T06-15-21.306042.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_02T06_15_21.306042", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T06-15-21.306042.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T06-15-21.306042.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_02T06_15_21.306042", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T06-15-21.306042.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T06-15-21.306042.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_02T06_15_21.306042", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T06-15-21.306042.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T06-15-21.306042.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_02T06_15_21.306042", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T06-15-21.306042.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T06-15-21.306042.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_02T06_15_21.306042", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T06-15-21.306042.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T06-15-21.306042.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_02T06_15_21.306042", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T06-15-21.306042.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T06-15-21.306042.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_02T06_15_21.306042", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T06-15-21.306042.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T06-15-21.306042.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_02T06_15_21.306042", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T06-15-21.306042.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T06-15-21.306042.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_02T06_15_21.306042", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T06-15-21.306042.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T06-15-21.306042.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_02T06_15_21.306042", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T06-15-21.306042.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T06-15-21.306042.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_02T06_15_21.306042", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T06-15-21.306042.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T06-15-21.306042.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_02T06_15_21.306042", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T06-15-21.306042.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T06-15-21.306042.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": 
"2024_02_02T06_15_21.306042", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T06-15-21.306042.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T06-15-21.306042.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_02T06_15_21.306042", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T06-15-21.306042.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T06-15-21.306042.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_02T06_15_21.306042", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T06-15-21.306042.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T06-15-21.306042.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_02T06_15_21.306042", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T06-15-21.306042.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T06-15-21.306042.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_02T06_15_21.306042", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T06-15-21.306042.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T06-15-21.306042.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_02T06_15_21.306042", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T06-15-21.306042.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T06-15-21.306042.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_02T06_15_21.306042", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T06-15-21.306042.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T06-15-21.306042.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_02T06_15_21.306042", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T06-15-21.306042.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T06-15-21.306042.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_02T06_15_21.306042", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T06-15-21.306042.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T06-15-21.306042.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_02T06_15_21.306042", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T06-15-21.306042.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T06-15-21.306042.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_02T06_15_21.306042", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T06-15-21.306042.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T06-15-21.306042.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_02T06_15_21.306042", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T06-15-21.306042.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T06-15-21.306042.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_02T06_15_21.306042", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T06-15-21.306042.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T06-15-21.306042.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_02T06_15_21.306042", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T06-15-21.306042.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T06-15-21.306042.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_02T06_15_21.306042", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T06-15-21.306042.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T06-15-21.306042.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_02T06_15_21.306042", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T06-15-21.306042.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T06-15-21.306042.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_02T06_15_21.306042", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T06-15-21.306042.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T06-15-21.306042.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_02T06_15_21.306042", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T06-15-21.306042.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T06-15-21.306042.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_02T06_15_21.306042", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T06-15-21.306042.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T06-15-21.306042.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_02T06_15_21.306042", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T06-15-21.306042.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T06-15-21.306042.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_02T06_15_21.306042", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T06-15-21.306042.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T06-15-21.306042.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_02T06_15_21.306042", "path": ["**/details_harness|winogrande|5_2024-02-02T06-15-21.306042.parquet"]}, {"split": "latest", "path": 
["**/details_harness|winogrande|5_2024-02-02T06-15-21.306042.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_02_02T06_15_21.306042", "path": ["results_2024-02-02T06-15-21.306042.parquet"]}, {"split": "latest", "path": ["results_2024-02-02T06-15-21.306042.parquet"]}]}]}
2024-02-02T06:18:12+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of codellama/CodeLlama-70b-Instruct-hf Dataset automatically created during the evaluation run of model codellama/CodeLlama-70b-Instruct-hf on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-02-02T06:15:21.306042 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
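The flattened card text above says "you can for instance do the following" but the fenced snippet was dropped when the card was flattened into this field. A minimal reconstruction follows, under the assumption that the repository name follows the `details_<org>__<model>` pattern of the sibling cards in this dump:

```python
from datasets import load_dataset

# Repo name assumed from the details_<org>__<model> pattern; the config and
# split mirror the load example shown on the other cards in this dump.
data = load_dataset(
    "open-llm-leaderboard/details_codellama__CodeLlama-70b-Instruct-hf",
    "harness_winogrande_5",
    split="train",
)
```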
[ "# Dataset Card for Evaluation run of codellama/CodeLlama-70b-Instruct-hf\n\n\n\nDataset automatically created during the evaluation run of model codellama/CodeLlama-70b-Instruct-hf on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T06:15:21.306042(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of codellama/CodeLlama-70b-Instruct-hf\n\n\n\nDataset automatically created during the evaluation run of model codellama/CodeLlama-70b-Instruct-hf on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T06:15:21.306042(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
37bfcbe0cfaa42065f9f9493a8c53499c866b194
# Dataset Card for Evaluation run of codellama/CodeLlama-70b-Python-hf

<!-- Provide a quick summary of the dataset. -->

Dataset automatically created during the evaluation run of model [codellama/CodeLlama-70b-Python-hf](https://huggingface.co/codellama/CodeLlama-70b-Python-hf) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_codellama__CodeLlama-70b-Python-hf",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2024-02-02T06:17:38.337900](https://huggingface.co/datasets/open-llm-leaderboard/details_codellama__CodeLlama-70b-Python-hf/blob/main/results_2024-02-02T06-17-38.337900.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "acc": 0.5616930765303749,
        "acc_stderr": 0.033883080419050034,
        "acc_norm": 0.5652706193571843,
        "acc_norm_stderr": 0.03457354414367789,
        "mc1": 0.2864137086903305,
        "mc1_stderr": 0.015826142439502342,
        "mc2": 0.4178040204095389,
        "mc2_stderr": 0.0149114399190204
    },
    "harness|arc:challenge|25": {
        "acc": 0.5025597269624573,
        "acc_stderr": 0.014611199329843774,
        "acc_norm": 0.5511945392491467,
        "acc_norm_stderr": 0.014534599585097669
    },
    "harness|hellaswag|10": {
        "acc": 0.5812587134037045,
        "acc_stderr": 0.00492344562786152,
        "acc_norm": 0.7848038239394542,
        "acc_norm_stderr": 0.004101184870964187
    },
    "harness|hendrycksTest-abstract_algebra|5": {
        "acc": 0.32,
        "acc_stderr": 0.046882617226215034,
        "acc_norm": 0.32,
        "acc_norm_stderr": 0.046882617226215034
    },
    "harness|hendrycksTest-anatomy|5": {
        "acc": 0.5185185185185185,
        "acc_stderr": 0.043163785995113245,
        "acc_norm": 0.5185185185185185,
        "acc_norm_stderr": 0.043163785995113245
    },
    "harness|hendrycksTest-astronomy|5": {
        "acc": 0.6052631578947368,
        "acc_stderr": 0.039777499346220734,
        "acc_norm": 0.6052631578947368,
        "acc_norm_stderr": 0.039777499346220734
    },
    "harness|hendrycksTest-business_ethics|5": {
        "acc": 0.59,
        "acc_stderr": 0.04943110704237102,
        "acc_norm": 0.59,
        "acc_norm_stderr": 0.04943110704237102
    },
    "harness|hendrycksTest-clinical_knowledge|5": {
        "acc": 0.5132075471698113,
        "acc_stderr": 0.030762134874500476,
        "acc_norm": 0.5132075471698113,
        "acc_norm_stderr": 0.030762134874500476
    },
    "harness|hendrycksTest-college_biology|5": {
        "acc": 0.5486111111111112,
        "acc_stderr": 0.04161402398403279,
        "acc_norm": 0.5486111111111112,
        "acc_norm_stderr": 0.04161402398403279
    },
    "harness|hendrycksTest-college_chemistry|5": {
        "acc": 0.35,
        "acc_stderr": 0.0479372485441102,
        "acc_norm": 0.35,
        "acc_norm_stderr": 0.0479372485441102
    },
    "harness|hendrycksTest-college_computer_science|5": {
        "acc": 0.43,
        "acc_stderr": 0.049756985195624284,
        "acc_norm": 0.43,
        "acc_norm_stderr": 0.049756985195624284
    },
    "harness|hendrycksTest-college_mathematics|5": {
        "acc": 0.37,
        "acc_stderr": 0.048523658709390974,
        "acc_norm": 0.37,
        "acc_norm_stderr": 0.048523658709390974
    },
    "harness|hendrycksTest-college_medicine|5": {
        "acc": 0.49710982658959535,
        "acc_stderr": 0.038124005659748335,
        "acc_norm": 0.49710982658959535,
        "acc_norm_stderr": 0.038124005659748335
    },
    "harness|hendrycksTest-college_physics|5": {
        "acc": 0.28431372549019607,
        "acc_stderr": 0.04488482852329017,
        "acc_norm": 0.28431372549019607,
        "acc_norm_stderr": 0.04488482852329017
    },
    "harness|hendrycksTest-computer_security|5": {
        "acc": 0.75,
        "acc_stderr": 0.04351941398892446,
        "acc_norm": 0.75,
        "acc_norm_stderr": 0.04351941398892446
    },
    "harness|hendrycksTest-conceptual_physics|5": {
        "acc": 0.5234042553191489,
        "acc_stderr": 0.03265019475033582,
        "acc_norm": 0.5234042553191489,
        "acc_norm_stderr": 0.03265019475033582
    },
    "harness|hendrycksTest-econometrics|5": {
        "acc": 0.3684210526315789,
        "acc_stderr": 0.04537815354939391,
        "acc_norm": 0.3684210526315789,
        "acc_norm_stderr": 0.04537815354939391
    },
    "harness|hendrycksTest-electrical_engineering|5": {
        "acc": 0.5241379310344828,
        "acc_stderr": 0.041618085035015295,
        "acc_norm": 0.5241379310344828,
        "acc_norm_stderr": 0.041618085035015295
    },
    "harness|hendrycksTest-elementary_mathematics|5": {
        "acc": 0.4365079365079365,
        "acc_stderr": 0.02554284681740049,
        "acc_norm": 0.4365079365079365,
        "acc_norm_stderr": 0.02554284681740049
    },
    "harness|hendrycksTest-formal_logic|5": {
        "acc": 0.4523809523809524,
        "acc_stderr": 0.04451807959055328,
        "acc_norm": 0.4523809523809524,
        "acc_norm_stderr": 0.04451807959055328
    },
    "harness|hendrycksTest-global_facts|5": {
        "acc": 0.34,
        "acc_stderr": 0.04760952285695236,
        "acc_norm": 0.34,
        "acc_norm_stderr": 0.04760952285695236
    },
    "harness|hendrycksTest-high_school_biology|5": {
        "acc": 0.6451612903225806,
        "acc_stderr": 0.027218889773308757,
        "acc_norm": 0.6451612903225806,
        "acc_norm_stderr": 0.027218889773308757
    },
    "harness|hendrycksTest-high_school_chemistry|5": {
        "acc": 0.4039408866995074,
        "acc_stderr": 0.0345245390382204,
        "acc_norm": 0.4039408866995074,
        "acc_norm_stderr": 0.0345245390382204
    },
    "harness|hendrycksTest-high_school_computer_science|5": {
        "acc": 0.71,
        "acc_stderr": 0.045604802157206845,
        "acc_norm": 0.71,
        "acc_norm_stderr": 0.045604802157206845
    },
    "harness|hendrycksTest-high_school_european_history|5": {
        "acc": 0.7272727272727273,
        "acc_stderr": 0.0347769116216366,
        "acc_norm": 0.7272727272727273,
        "acc_norm_stderr": 0.0347769116216366
    },
    "harness|hendrycksTest-high_school_geography|5": {
        "acc": 0.6919191919191919,
        "acc_stderr": 0.032894773300986155,
        "acc_norm": 0.6919191919191919,
        "acc_norm_stderr": 0.032894773300986155
    },
    "harness|hendrycksTest-high_school_government_and_politics|5": {
        "acc": 0.7564766839378239,
        "acc_stderr": 0.030975436386845426,
        "acc_norm": 0.7564766839378239,
        "acc_norm_stderr": 0.030975436386845426
    },
    "harness|hendrycksTest-high_school_macroeconomics|5": {
        "acc": 0.5051282051282051,
        "acc_stderr": 0.02534967290683865,
        "acc_norm": 0.5051282051282051,
        "acc_norm_stderr": 0.02534967290683865
    },
    "harness|hendrycksTest-high_school_mathematics|5": {
        "acc": 0.36666666666666664,
        "acc_stderr": 0.029381620726465073,
        "acc_norm": 0.36666666666666664,
        "acc_norm_stderr": 0.029381620726465073
    },
    "harness|hendrycksTest-high_school_microeconomics|5": {
        "acc": 0.5756302521008403,
        "acc_stderr": 0.03210479051015776,
        "acc_norm": 0.5756302521008403,
        "acc_norm_stderr": 0.03210479051015776
    },
    "harness|hendrycksTest-high_school_physics|5": {
        "acc": 0.3509933774834437,
        "acc_stderr": 0.03896981964257375,
        "acc_norm": 0.3509933774834437,
        "acc_norm_stderr": 0.03896981964257375
    },
    "harness|hendrycksTest-high_school_psychology|5": {
        "acc": 0.7522935779816514,
        "acc_stderr": 0.018508143602547832,
        "acc_norm": 0.7522935779816514,
        "acc_norm_stderr": 0.018508143602547832
    },
    "harness|hendrycksTest-high_school_statistics|5": {
        "acc": 0.4861111111111111,
        "acc_stderr": 0.03408655867977748,
        "acc_norm": 0.4861111111111111,
        "acc_norm_stderr": 0.03408655867977748
    },
    "harness|hendrycksTest-high_school_us_history|5": {
        "acc": 0.7549019607843137,
        "acc_stderr": 0.030190282453501947,
        "acc_norm": 0.7549019607843137,
        "acc_norm_stderr": 0.030190282453501947
    },
    "harness|hendrycksTest-high_school_world_history|5": {
        "acc": 0.7637130801687764,
        "acc_stderr": 0.027652153144159256,
        "acc_norm": 0.7637130801687764,
        "acc_norm_stderr": 0.027652153144159256
    },
    "harness|hendrycksTest-human_aging|5": {
        "acc": 0.5874439461883408,
        "acc_stderr": 0.03304062175449297,
        "acc_norm": 0.5874439461883408,
        "acc_norm_stderr": 0.03304062175449297
    },
    "harness|hendrycksTest-human_sexuality|5": {
        "acc": 0.6412213740458015,
        "acc_stderr": 0.04206739313864908,
        "acc_norm": 0.6412213740458015,
        "acc_norm_stderr": 0.04206739313864908
    },
    "harness|hendrycksTest-international_law|5": {
        "acc": 0.71900826446281,
        "acc_stderr": 0.04103203830514512,
        "acc_norm": 0.71900826446281,
        "acc_norm_stderr": 0.04103203830514512
    },
    "harness|hendrycksTest-jurisprudence|5": {
        "acc": 0.6944444444444444,
        "acc_stderr": 0.044531975073749834,
        "acc_norm": 0.6944444444444444,
        "acc_norm_stderr": 0.044531975073749834
    },
    "harness|hendrycksTest-logical_fallacies|5": {
        "acc": 0.7116564417177914,
        "acc_stderr": 0.03559039531617342,
        "acc_norm": 0.7116564417177914,
        "acc_norm_stderr": 0.03559039531617342
    },
    "harness|hendrycksTest-machine_learning|5": {
        "acc": 0.41964285714285715,
        "acc_stderr": 0.046840993210771065,
        "acc_norm": 0.41964285714285715,
        "acc_norm_stderr": 0.046840993210771065
    },
    "harness|hendrycksTest-management|5": {
        "acc": 0.7475728155339806,
        "acc_stderr": 0.04301250399690878,
        "acc_norm": 0.7475728155339806,
        "acc_norm_stderr": 0.04301250399690878
    },
    "harness|hendrycksTest-marketing|5": {
        "acc": 0.8333333333333334,
        "acc_stderr": 0.02441494730454368,
        "acc_norm": 0.8333333333333334,
        "acc_norm_stderr": 0.02441494730454368
    },
    "harness|hendrycksTest-medical_genetics|5": {
        "acc": 0.53,
        "acc_stderr": 0.05016135580465919,
        "acc_norm": 0.53,
        "acc_norm_stderr": 0.05016135580465919
    },
    "harness|hendrycksTest-miscellaneous|5": {
        "acc": 0.7011494252873564,
        "acc_stderr": 0.016369256815093138,
        "acc_norm": 0.7011494252873564,
        "acc_norm_stderr": 0.016369256815093138
    },
    "harness|hendrycksTest-moral_disputes|5": {
        "acc": 0.5867052023121387,
        "acc_stderr": 0.026511261369409247,
        "acc_norm": 0.5867052023121387,
        "acc_norm_stderr": 0.026511261369409247
    },
    "harness|hendrycksTest-moral_scenarios|5": {
        "acc": 0.3396648044692737,
        "acc_stderr": 0.015839400406212505,
        "acc_norm": 0.3396648044692737,
        "acc_norm_stderr": 0.015839400406212505
    },
    "harness|hendrycksTest-nutrition|5": {
        "acc": 0.5555555555555556,
        "acc_stderr": 0.02845263998508801,
        "acc_norm": 0.5555555555555556,
        "acc_norm_stderr": 0.02845263998508801
    },
    "harness|hendrycksTest-philosophy|5": {
        "acc": 0.6270096463022508,
        "acc_stderr": 0.027466610213140112,
        "acc_norm": 0.6270096463022508,
        "acc_norm_stderr": 0.027466610213140112
    },
    "harness|hendrycksTest-prehistory|5": {
        "acc": 0.5802469135802469,
        "acc_stderr": 0.02746009955700513,
        "acc_norm": 0.5802469135802469,
        "acc_norm_stderr": 0.02746009955700513
    },
    "harness|hendrycksTest-professional_accounting|5": {
        "acc": 0.43617021276595747,
        "acc_stderr": 0.02958345203628407,
        "acc_norm": 0.43617021276595747,
        "acc_norm_stderr": 0.02958345203628407
    },
    "harness|hendrycksTest-professional_law|5": {
        "acc": 0.4015645371577575,
        "acc_stderr": 0.012520315120147101,
        "acc_norm": 0.4015645371577575,
        "acc_norm_stderr": 0.012520315120147101
    },
    "harness|hendrycksTest-professional_medicine|5": {
        "acc": 0.45588235294117646,
        "acc_stderr": 0.030254372573976687,
        "acc_norm": 0.45588235294117646,
        "acc_norm_stderr": 0.030254372573976687
    },
    "harness|hendrycksTest-professional_psychology|5": {
        "acc": 0.5277777777777778,
        "acc_stderr": 0.020196594933541197,
        "acc_norm": 0.5277777777777778,
        "acc_norm_stderr": 0.020196594933541197
    },
    "harness|hendrycksTest-public_relations|5": {
        "acc": 0.6909090909090909,
        "acc_stderr": 0.044262946482000985,
        "acc_norm": 0.6909090909090909,
        "acc_norm_stderr": 0.044262946482000985
    },
    "harness|hendrycksTest-security_studies|5": {
        "acc": 0.6571428571428571,
        "acc_stderr": 0.030387262919547728,
        "acc_norm": 0.6571428571428571,
        "acc_norm_stderr": 0.030387262919547728
    },
    "harness|hendrycksTest-sociology|5": {
        "acc": 0.7512437810945274,
        "acc_stderr": 0.030567675938916707,
        "acc_norm": 0.7512437810945274,
        "acc_norm_stderr": 0.030567675938916707
    },
    "harness|hendrycksTest-us_foreign_policy|5": {
        "acc": 0.76,
        "acc_stderr": 0.042923469599092816,
        "acc_norm": 0.76,
        "acc_norm_stderr": 0.042923469599092816
    },
    "harness|hendrycksTest-virology|5": {
        "acc": 0.4457831325301205,
        "acc_stderr": 0.03869543323472101,
        "acc_norm": 0.4457831325301205,
        "acc_norm_stderr": 0.03869543323472101
    },
    "harness|hendrycksTest-world_religions|5": {
        "acc": 0.7017543859649122,
        "acc_stderr": 0.03508771929824565,
        "acc_norm": 0.7017543859649122,
        "acc_norm_stderr": 0.03508771929824565
    },
    "harness|truthfulqa:mc|0": {
        "mc1": 0.2864137086903305,
        "mc1_stderr": 0.015826142439502342,
        "mc2": 0.4178040204095389,
        "mc2_stderr": 0.0149114399190204
    },
    "harness|winogrande|5": {
        "acc": 0.7300710339384373,
        "acc_stderr": 0.0124764333720026
    },
    "harness|gsm8k|5": {
        "acc": 0.43442001516300227,
        "acc_stderr": 0.013653507211411411
    }
}
```

## Dataset Details

### Dataset Description

<!-- Provide a longer summary of what this dataset is. -->

- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]

### Dataset Sources [optional]

<!-- Provide the basic links for the dataset. -->

- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]

## Uses

<!-- Address questions around how the dataset is intended to be used. -->

### Direct Use

<!-- This section describes suitable use cases for the dataset. -->

[More Information Needed]

### Out-of-Scope Use

<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->

[More Information Needed]

## Dataset Structure

<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->

[More Information Needed]

## Dataset Creation

### Curation Rationale

<!-- Motivation for the creation of this dataset. -->

[More Information Needed]

### Source Data

<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->

#### Data Collection and Processing

<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->

[More Information Needed]

#### Who are the source data producers?

<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->

[More Information Needed]

### Annotations [optional]

<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->

#### Annotation process

<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->

[More Information Needed]

#### Who are the annotators?

<!-- This section describes the people or systems who created the annotations. -->

[More Information Needed]

#### Personal and Sensitive Information

<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->

[More Information Needed]

## Bias, Risks, and Limitations

<!-- This section is meant to convey both technical and sociotechnical limitations. -->

[More Information Needed]

### Recommendations

<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->

Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.

## Citation [optional]

<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->

**BibTeX:**

[More Information Needed]

**APA:**

[More Information Needed]

## Glossary [optional]

<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->

[More Information Needed]

## More Information [optional]

[More Information Needed]

## Dataset Card Authors [optional]

[More Information Needed]

## Dataset Card Contact

[More Information Needed]
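The card above notes that a "results" configuration stores the aggregated metrics of the run. A hedged sketch of reading those aggregates follows; the config and split names match the mapping shown for the sibling repository earlier in this dump, while the column layout is an assumption to inspect rather than rely on:

```python
from datasets import load_dataset

# "results" is the aggregate config named on the card; "latest" should resolve
# to the most recent results parquet for this run.
results = load_dataset(
    "open-llm-leaderboard/details_codellama__CodeLlama-70b-Python-hf",
    "results",
    split="latest",
)
print(results.column_names)  # inspect first: the aggregate schema is run-dependent
```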
open-llm-leaderboard/details_codellama__CodeLlama-70b-Python-hf
[ "region:us" ]
2024-02-02T06:20:04+00:00
{"pretty_name": "Evaluation run of codellama/CodeLlama-70b-Python-hf", "dataset_summary": "Dataset automatically created during the evaluation run of model [codellama/CodeLlama-70b-Python-hf](https://huggingface.co/codellama/CodeLlama-70b-Python-hf) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_codellama__CodeLlama-70b-Python-hf\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-02T06:17:38.337900](https://huggingface.co/datasets/open-llm-leaderboard/details_codellama__CodeLlama-70b-Python-hf/blob/main/results_2024-02-02T06-17-38.337900.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5616930765303749,\n \"acc_stderr\": 0.033883080419050034,\n \"acc_norm\": 0.5652706193571843,\n \"acc_norm_stderr\": 0.03457354414367789,\n \"mc1\": 0.2864137086903305,\n \"mc1_stderr\": 0.015826142439502342,\n \"mc2\": 0.4178040204095389,\n \"mc2_stderr\": 0.0149114399190204\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5025597269624573,\n \"acc_stderr\": 0.014611199329843774,\n \"acc_norm\": 0.5511945392491467,\n \"acc_norm_stderr\": 0.014534599585097669\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.5812587134037045,\n \"acc_stderr\": 0.00492344562786152,\n \"acc_norm\": 0.7848038239394542,\n \"acc_norm_stderr\": 0.004101184870964187\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.046882617226215034,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.046882617226215034\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5185185185185185,\n \"acc_stderr\": 0.043163785995113245,\n \"acc_norm\": 0.5185185185185185,\n \"acc_norm_stderr\": 0.043163785995113245\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6052631578947368,\n \"acc_stderr\": 0.039777499346220734,\n \"acc_norm\": 0.6052631578947368,\n \"acc_norm_stderr\": 0.039777499346220734\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.59,\n \"acc_stderr\": 0.04943110704237102,\n \"acc_norm\": 0.59,\n \"acc_norm_stderr\": 0.04943110704237102\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.5132075471698113,\n \"acc_stderr\": 0.030762134874500476,\n \"acc_norm\": 0.5132075471698113,\n \"acc_norm_stderr\": 0.030762134874500476\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.5486111111111112,\n \"acc_stderr\": 0.04161402398403279,\n \"acc_norm\": 0.5486111111111112,\n \"acc_norm_stderr\": 0.04161402398403279\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n 
\"acc\": 0.35,\n \"acc_stderr\": 0.0479372485441102,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.43,\n \"acc_stderr\": 0.049756985195624284,\n \"acc_norm\": 0.43,\n \"acc_norm_stderr\": 0.049756985195624284\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.37,\n \"acc_stderr\": 0.048523658709390974,\n \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.048523658709390974\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.49710982658959535,\n \"acc_stderr\": 0.038124005659748335,\n \"acc_norm\": 0.49710982658959535,\n \"acc_norm_stderr\": 0.038124005659748335\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.28431372549019607,\n \"acc_stderr\": 0.04488482852329017,\n \"acc_norm\": 0.28431372549019607,\n \"acc_norm_stderr\": 0.04488482852329017\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5234042553191489,\n \"acc_stderr\": 0.03265019475033582,\n \"acc_norm\": 0.5234042553191489,\n \"acc_norm_stderr\": 0.03265019475033582\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.3684210526315789,\n \"acc_stderr\": 0.04537815354939391,\n \"acc_norm\": 0.3684210526315789,\n \"acc_norm_stderr\": 0.04537815354939391\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5241379310344828,\n \"acc_stderr\": 0.041618085035015295,\n \"acc_norm\": 0.5241379310344828,\n \"acc_norm_stderr\": 0.041618085035015295\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.4365079365079365,\n \"acc_stderr\": 0.02554284681740049,\n \"acc_norm\": 0.4365079365079365,\n \"acc_norm_stderr\": 0.02554284681740049\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4523809523809524,\n \"acc_stderr\": 0.04451807959055328,\n \"acc_norm\": 0.4523809523809524,\n \"acc_norm_stderr\": 0.04451807959055328\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695236,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695236\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.6451612903225806,\n \"acc_stderr\": 0.027218889773308757,\n \"acc_norm\": 0.6451612903225806,\n \"acc_norm_stderr\": 0.027218889773308757\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.4039408866995074,\n \"acc_stderr\": 0.0345245390382204,\n \"acc_norm\": 0.4039408866995074,\n \"acc_norm_stderr\": 0.0345245390382204\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7272727272727273,\n \"acc_stderr\": 0.0347769116216366,\n \"acc_norm\": 0.7272727272727273,\n \"acc_norm_stderr\": 0.0347769116216366\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.6919191919191919,\n \"acc_stderr\": 0.032894773300986155,\n \"acc_norm\": 0.6919191919191919,\n \"acc_norm_stderr\": 0.032894773300986155\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.7564766839378239,\n \"acc_stderr\": 0.030975436386845426,\n \"acc_norm\": 0.7564766839378239,\n \"acc_norm_stderr\": 0.030975436386845426\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.5051282051282051,\n \"acc_stderr\": 0.02534967290683865,\n \"acc_norm\": 0.5051282051282051,\n \"acc_norm_stderr\": 0.02534967290683865\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.36666666666666664,\n \"acc_stderr\": 0.029381620726465073,\n \"acc_norm\": 0.36666666666666664,\n \"acc_norm_stderr\": 0.029381620726465073\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.5756302521008403,\n \"acc_stderr\": 0.03210479051015776,\n \"acc_norm\": 0.5756302521008403,\n \"acc_norm_stderr\": 0.03210479051015776\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3509933774834437,\n \"acc_stderr\": 0.03896981964257375,\n \"acc_norm\": 0.3509933774834437,\n \"acc_norm_stderr\": 0.03896981964257375\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.7522935779816514,\n \"acc_stderr\": 0.018508143602547832,\n \"acc_norm\": 0.7522935779816514,\n \"acc_norm_stderr\": 0.018508143602547832\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.4861111111111111,\n \"acc_stderr\": 0.03408655867977748,\n \"acc_norm\": 0.4861111111111111,\n \"acc_norm_stderr\": 0.03408655867977748\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7549019607843137,\n \"acc_stderr\": 0.030190282453501947,\n \"acc_norm\": 0.7549019607843137,\n \"acc_norm_stderr\": 0.030190282453501947\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7637130801687764,\n \"acc_stderr\": 0.027652153144159256,\n \"acc_norm\": 0.7637130801687764,\n \"acc_norm_stderr\": 0.027652153144159256\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.5874439461883408,\n \"acc_stderr\": 0.03304062175449297,\n \"acc_norm\": 0.5874439461883408,\n \"acc_norm_stderr\": 0.03304062175449297\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.6412213740458015,\n \"acc_stderr\": 0.04206739313864908,\n \"acc_norm\": 0.6412213740458015,\n \"acc_norm_stderr\": 0.04206739313864908\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.71900826446281,\n \"acc_stderr\": 0.04103203830514512,\n \"acc_norm\": 0.71900826446281,\n \"acc_norm_stderr\": 0.04103203830514512\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.6944444444444444,\n \"acc_stderr\": 0.044531975073749834,\n \"acc_norm\": 0.6944444444444444,\n \"acc_norm_stderr\": 0.044531975073749834\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7116564417177914,\n \"acc_stderr\": 0.03559039531617342,\n \"acc_norm\": 0.7116564417177914,\n \"acc_norm_stderr\": 0.03559039531617342\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.41964285714285715,\n \"acc_stderr\": 0.046840993210771065,\n \"acc_norm\": 0.41964285714285715,\n \"acc_norm_stderr\": 0.046840993210771065\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7475728155339806,\n \"acc_stderr\": 0.04301250399690878,\n \"acc_norm\": 0.7475728155339806,\n \"acc_norm_stderr\": 0.04301250399690878\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8333333333333334,\n \"acc_stderr\": 0.02441494730454368,\n \"acc_norm\": 0.8333333333333334,\n \"acc_norm_stderr\": 0.02441494730454368\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.53,\n \"acc_stderr\": 0.05016135580465919,\n \"acc_norm\": 0.53,\n \"acc_norm_stderr\": 0.05016135580465919\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.7011494252873564,\n \"acc_stderr\": 0.016369256815093138,\n \"acc_norm\": 0.7011494252873564,\n \"acc_norm_stderr\": 0.016369256815093138\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.5867052023121387,\n \"acc_stderr\": 0.026511261369409247,\n \"acc_norm\": 0.5867052023121387,\n \"acc_norm_stderr\": 0.026511261369409247\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.3396648044692737,\n \"acc_stderr\": 0.015839400406212505,\n \"acc_norm\": 0.3396648044692737,\n \"acc_norm_stderr\": 0.015839400406212505\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.5555555555555556,\n \"acc_stderr\": 0.02845263998508801,\n \"acc_norm\": 0.5555555555555556,\n \"acc_norm_stderr\": 0.02845263998508801\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6270096463022508,\n \"acc_stderr\": 0.027466610213140112,\n \"acc_norm\": 0.6270096463022508,\n \"acc_norm_stderr\": 0.027466610213140112\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.5802469135802469,\n \"acc_stderr\": 0.02746009955700513,\n \"acc_norm\": 0.5802469135802469,\n \"acc_norm_stderr\": 0.02746009955700513\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.43617021276595747,\n \"acc_stderr\": 0.02958345203628407,\n \"acc_norm\": 0.43617021276595747,\n \"acc_norm_stderr\": 0.02958345203628407\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4015645371577575,\n \"acc_stderr\": 0.012520315120147101,\n \"acc_norm\": 0.4015645371577575,\n \"acc_norm_stderr\": 0.012520315120147101\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.45588235294117646,\n \"acc_stderr\": 0.030254372573976687,\n \"acc_norm\": 0.45588235294117646,\n \"acc_norm_stderr\": 0.030254372573976687\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.5277777777777778,\n \"acc_stderr\": 0.020196594933541197,\n \"acc_norm\": 0.5277777777777778,\n \"acc_norm_stderr\": 0.020196594933541197\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6909090909090909,\n \"acc_stderr\": 0.044262946482000985,\n \"acc_norm\": 0.6909090909090909,\n \"acc_norm_stderr\": 0.044262946482000985\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.6571428571428571,\n \"acc_stderr\": 0.030387262919547728,\n \"acc_norm\": 0.6571428571428571,\n \"acc_norm_stderr\": 0.030387262919547728\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.7512437810945274,\n \"acc_stderr\": 0.030567675938916707,\n \"acc_norm\": 0.7512437810945274,\n \"acc_norm_stderr\": 0.030567675938916707\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.76,\n \"acc_stderr\": 0.042923469599092816,\n \"acc_norm\": 0.76,\n \"acc_norm_stderr\": 0.042923469599092816\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.4457831325301205,\n \"acc_stderr\": 0.03869543323472101,\n \"acc_norm\": 0.4457831325301205,\n \"acc_norm_stderr\": 0.03869543323472101\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.7017543859649122,\n \"acc_stderr\": 0.03508771929824565,\n \"acc_norm\": 0.7017543859649122,\n \"acc_norm_stderr\": 0.03508771929824565\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.2864137086903305,\n \"mc1_stderr\": 0.015826142439502342,\n \"mc2\": 0.4178040204095389,\n \"mc2_stderr\": 0.0149114399190204\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7300710339384373,\n \"acc_stderr\": 0.0124764333720026\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.43442001516300227,\n \"acc_stderr\": 
0.013653507211411411\n }\n}\n```", "repo_url": "https://huggingface.co/codellama/CodeLlama-70b-Python-hf", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_02T06_17_38.337900", "path": ["**/details_harness|arc:challenge|25_2024-02-02T06-17-38.337900.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-02T06-17-38.337900.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_02T06_17_38.337900", "path": ["**/details_harness|gsm8k|5_2024-02-02T06-17-38.337900.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-02T06-17-38.337900.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_02T06_17_38.337900", "path": ["**/details_harness|hellaswag|10_2024-02-02T06-17-38.337900.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-02T06-17-38.337900.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_02T06_17_38.337900", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T06-17-38.337900.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T06-17-38.337900.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T06-17-38.337900.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T06-17-38.337900.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T06-17-38.337900.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T06-17-38.337900.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_02T06_17_38.337900", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T06-17-38.337900.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T06-17-38.337900.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_02T06_17_38.337900", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T06-17-38.337900.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T06-17-38.337900.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_02T06_17_38.337900", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T06-17-38.337900.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T06-17-38.337900.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_02T06_17_38.337900", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T06-17-38.337900.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T06-17-38.337900.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_02T06_17_38.337900", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T06-17-38.337900.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T06-17-38.337900.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_02T06_17_38.337900", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T06-17-38.337900.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T06-17-38.337900.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_02T06_17_38.337900", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T06-17-38.337900.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T06-17-38.337900.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_02_02T06_17_38.337900", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T06-17-38.337900.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T06-17-38.337900.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_02T06_17_38.337900", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T06-17-38.337900.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T06-17-38.337900.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_02T06_17_38.337900", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T06-17-38.337900.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T06-17-38.337900.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_02T06_17_38.337900", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T06-17-38.337900.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T06-17-38.337900.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_02T06_17_38.337900", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T06-17-38.337900.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T06-17-38.337900.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_02T06_17_38.337900", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T06-17-38.337900.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T06-17-38.337900.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_02T06_17_38.337900", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T06-17-38.337900.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T06-17-38.337900.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_02T06_17_38.337900", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T06-17-38.337900.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T06-17-38.337900.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_02T06_17_38.337900", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T06-17-38.337900.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T06-17-38.337900.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_02T06_17_38.337900", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T06-17-38.337900.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T06-17-38.337900.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_02T06_17_38.337900", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T06-17-38.337900.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T06-17-38.337900.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_02T06_17_38.337900", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T06-17-38.337900.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T06-17-38.337900.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_02T06_17_38.337900", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T06-17-38.337900.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T06-17-38.337900.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_02T06_17_38.337900", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T06-17-38.337900.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T06-17-38.337900.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_02T06_17_38.337900", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T06-17-38.337900.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T06-17-38.337900.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_02T06_17_38.337900", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T06-17-38.337900.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T06-17-38.337900.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_02T06_17_38.337900", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T06-17-38.337900.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T06-17-38.337900.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_02T06_17_38.337900", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T06-17-38.337900.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T06-17-38.337900.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_02T06_17_38.337900", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T06-17-38.337900.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T06-17-38.337900.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_02T06_17_38.337900", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T06-17-38.337900.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T06-17-38.337900.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_02T06_17_38.337900", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T06-17-38.337900.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T06-17-38.337900.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_02T06_17_38.337900", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T06-17-38.337900.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T06-17-38.337900.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_02T06_17_38.337900", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T06-17-38.337900.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T06-17-38.337900.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_02T06_17_38.337900", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T06-17-38.337900.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T06-17-38.337900.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_02T06_17_38.337900", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T06-17-38.337900.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T06-17-38.337900.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_02T06_17_38.337900", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T06-17-38.337900.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T06-17-38.337900.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_02T06_17_38.337900", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T06-17-38.337900.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T06-17-38.337900.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_02T06_17_38.337900", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T06-17-38.337900.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T06-17-38.337900.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_02T06_17_38.337900", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T06-17-38.337900.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T06-17-38.337900.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_02T06_17_38.337900", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T06-17-38.337900.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T06-17-38.337900.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_02_02T06_17_38.337900", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T06-17-38.337900.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T06-17-38.337900.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_02T06_17_38.337900", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T06-17-38.337900.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T06-17-38.337900.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_02T06_17_38.337900", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T06-17-38.337900.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T06-17-38.337900.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_02T06_17_38.337900", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T06-17-38.337900.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T06-17-38.337900.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_02T06_17_38.337900", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T06-17-38.337900.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T06-17-38.337900.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_02T06_17_38.337900", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T06-17-38.337900.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T06-17-38.337900.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_02T06_17_38.337900", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T06-17-38.337900.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T06-17-38.337900.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_02T06_17_38.337900", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T06-17-38.337900.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T06-17-38.337900.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_02T06_17_38.337900", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T06-17-38.337900.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T06-17-38.337900.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_02T06_17_38.337900", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T06-17-38.337900.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T06-17-38.337900.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_02T06_17_38.337900", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T06-17-38.337900.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T06-17-38.337900.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_02T06_17_38.337900", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T06-17-38.337900.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T06-17-38.337900.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_02T06_17_38.337900", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T06-17-38.337900.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T06-17-38.337900.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_02T06_17_38.337900", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T06-17-38.337900.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T06-17-38.337900.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_02T06_17_38.337900", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T06-17-38.337900.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T06-17-38.337900.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_02T06_17_38.337900", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T06-17-38.337900.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T06-17-38.337900.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_02T06_17_38.337900", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T06-17-38.337900.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T06-17-38.337900.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_02T06_17_38.337900", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T06-17-38.337900.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T06-17-38.337900.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_02T06_17_38.337900", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T06-17-38.337900.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T06-17-38.337900.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_02T06_17_38.337900", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T06-17-38.337900.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T06-17-38.337900.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_02T06_17_38.337900", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T06-17-38.337900.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T06-17-38.337900.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_02T06_17_38.337900", "path": ["**/details_harness|winogrande|5_2024-02-02T06-17-38.337900.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-02-02T06-17-38.337900.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_02_02T06_17_38.337900", "path": ["results_2024-02-02T06-17-38.337900.parquet"]}, {"split": "latest", "path": ["results_2024-02-02T06-17-38.337900.parquet"]}]}]}
2024-02-02T06:20:32+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of codellama/CodeLlama-70b-Python-hf Dataset automatically created during the evaluation run of model codellama/CodeLlama-70b-Python-hf on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-02-02T06:17:38.337900 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
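The cleaned card text above says "To load the details from a run, you can for instance do the following:" but the snippet itself was stripped during processing. A minimal sketch of that load, assuming the repo id follows the `open-llm-leaderboard/details_<org>__<model>` convention shown in the other cards of this dump:

```python
from datasets import load_dataset

# Reconstructed loading snippet for this run; the repo id is inferred from
# the details_<org>__<model> convention (an assumption for this record).
data = load_dataset(
    "open-llm-leaderboard/details_codellama__CodeLlama-70b-Python-hf",
    "harness_winogrande_5",
    split="train",
)
```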
[ "# Dataset Card for Evaluation run of codellama/CodeLlama-70b-Python-hf\n\n\n\nDataset automatically created during the evaluation run of model codellama/CodeLlama-70b-Python-hf on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T06:17:38.337900(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of codellama/CodeLlama-70b-Python-hf\n\n\n\nDataset automatically created during the evaluation run of model codellama/CodeLlama-70b-Python-hf on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T06:17:38.337900(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
e0d9f622aff9c8633d8559cf510794647412177f
# Bangumi Image Base of Plastic Memories

This is the image base of bangumi Plastic Memories. We detected 20 characters and 2364 images in total. The full dataset is [here](all.zip).

**Please note that these image bases are not guaranteed to be 100% cleaned; they may actually be noisy.** If you intend to manually train models using this dataset, we recommend performing the necessary preprocessing on the downloaded dataset to eliminate potential noisy samples (approximately 1% probability).

Here is the characters' preview:

| # | Images | Download | Preview 1 | Preview 2 | Preview 3 | Preview 4 | Preview 5 | Preview 6 | Preview 7 | Preview 8 |
|:------|---------:|:---------------------------|:-------------------------------|:-------------------------------|:-------------------------------|:-------------------------------|:-------------------------------|:-------------------------------|:-------------------------------|:-------------------------------|
| 0 | 684 | [Download](0/dataset.zip) | ![preview 1](0/preview_1.png) | ![preview 2](0/preview_2.png) | ![preview 3](0/preview_3.png) | ![preview 4](0/preview_4.png) | ![preview 5](0/preview_5.png) | ![preview 6](0/preview_6.png) | ![preview 7](0/preview_7.png) | ![preview 8](0/preview_8.png) |
| 1 | 53 | [Download](1/dataset.zip) | ![preview 1](1/preview_1.png) | ![preview 2](1/preview_2.png) | ![preview 3](1/preview_3.png) | ![preview 4](1/preview_4.png) | ![preview 5](1/preview_5.png) | ![preview 6](1/preview_6.png) | ![preview 7](1/preview_7.png) | ![preview 8](1/preview_8.png) |
| 2 | 40 | [Download](2/dataset.zip) | ![preview 1](2/preview_1.png) | ![preview 2](2/preview_2.png) | ![preview 3](2/preview_3.png) | ![preview 4](2/preview_4.png) | ![preview 5](2/preview_5.png) | ![preview 6](2/preview_6.png) | ![preview 7](2/preview_7.png) | ![preview 8](2/preview_8.png) |
| 3 | 56 | [Download](3/dataset.zip) | ![preview 1](3/preview_1.png) | ![preview 2](3/preview_2.png) | ![preview 3](3/preview_3.png) | ![preview 4](3/preview_4.png) | ![preview 5](3/preview_5.png) | ![preview 6](3/preview_6.png) | ![preview 7](3/preview_7.png) | ![preview 8](3/preview_8.png) |
| 4 | 57 | [Download](4/dataset.zip) | ![preview 1](4/preview_1.png) | ![preview 2](4/preview_2.png) | ![preview 3](4/preview_3.png) | ![preview 4](4/preview_4.png) | ![preview 5](4/preview_5.png) | ![preview 6](4/preview_6.png) | ![preview 7](4/preview_7.png) | ![preview 8](4/preview_8.png) |
| 5 | 59 | [Download](5/dataset.zip) | ![preview 1](5/preview_1.png) | ![preview 2](5/preview_2.png) | ![preview 3](5/preview_3.png) | ![preview 4](5/preview_4.png) | ![preview 5](5/preview_5.png) | ![preview 6](5/preview_6.png) | ![preview 7](5/preview_7.png) | ![preview 8](5/preview_8.png) |
| 6 | 23 | [Download](6/dataset.zip) | ![preview 1](6/preview_1.png) | ![preview 2](6/preview_2.png) | ![preview 3](6/preview_3.png) | ![preview 4](6/preview_4.png) | ![preview 5](6/preview_5.png) | ![preview 6](6/preview_6.png) | ![preview 7](6/preview_7.png) | ![preview 8](6/preview_8.png) |
| 7 | 13 | [Download](7/dataset.zip) | ![preview 1](7/preview_1.png) | ![preview 2](7/preview_2.png) | ![preview 3](7/preview_3.png) | ![preview 4](7/preview_4.png) | ![preview 5](7/preview_5.png) | ![preview 6](7/preview_6.png) | ![preview 7](7/preview_7.png) | ![preview 8](7/preview_8.png) |
| 8 | 49 | [Download](8/dataset.zip) | ![preview 1](8/preview_1.png) | ![preview 2](8/preview_2.png) | ![preview 3](8/preview_3.png) | ![preview 4](8/preview_4.png) | ![preview 5](8/preview_5.png) | ![preview 6](8/preview_6.png) | ![preview 7](8/preview_7.png) | ![preview 8](8/preview_8.png) |
| 9 | 16 | [Download](9/dataset.zip) | ![preview 1](9/preview_1.png) | ![preview 2](9/preview_2.png) | ![preview 3](9/preview_3.png) | ![preview 4](9/preview_4.png) | ![preview 5](9/preview_5.png) | ![preview 6](9/preview_6.png) | ![preview 7](9/preview_7.png) | ![preview 8](9/preview_8.png) |
| 10 | 5 | [Download](10/dataset.zip) | ![preview 1](10/preview_1.png) | ![preview 2](10/preview_2.png) | ![preview 3](10/preview_3.png) | ![preview 4](10/preview_4.png) | ![preview 5](10/preview_5.png) | N/A | N/A | N/A |
| 11 | 169 | [Download](11/dataset.zip) | ![preview 1](11/preview_1.png) | ![preview 2](11/preview_2.png) | ![preview 3](11/preview_3.png) | ![preview 4](11/preview_4.png) | ![preview 5](11/preview_5.png) | ![preview 6](11/preview_6.png) | ![preview 7](11/preview_7.png) | ![preview 8](11/preview_8.png) |
| 12 | 50 | [Download](12/dataset.zip) | ![preview 1](12/preview_1.png) | ![preview 2](12/preview_2.png) | ![preview 3](12/preview_3.png) | ![preview 4](12/preview_4.png) | ![preview 5](12/preview_5.png) | ![preview 6](12/preview_6.png) | ![preview 7](12/preview_7.png) | ![preview 8](12/preview_8.png) |
| 13 | 33 | [Download](13/dataset.zip) | ![preview 1](13/preview_1.png) | ![preview 2](13/preview_2.png) | ![preview 3](13/preview_3.png) | ![preview 4](13/preview_4.png) | ![preview 5](13/preview_5.png) | ![preview 6](13/preview_6.png) | ![preview 7](13/preview_7.png) | ![preview 8](13/preview_8.png) |
| 14 | 139 | [Download](14/dataset.zip) | ![preview 1](14/preview_1.png) | ![preview 2](14/preview_2.png) | ![preview 3](14/preview_3.png) | ![preview 4](14/preview_4.png) | ![preview 5](14/preview_5.png) | ![preview 6](14/preview_6.png) | ![preview 7](14/preview_7.png) | ![preview 8](14/preview_8.png) |
| 15 | 14 | [Download](15/dataset.zip) | ![preview 1](15/preview_1.png) | ![preview 2](15/preview_2.png) | ![preview 3](15/preview_3.png) | ![preview 4](15/preview_4.png) | ![preview 5](15/preview_5.png) | ![preview 6](15/preview_6.png) | ![preview 7](15/preview_7.png) | ![preview 8](15/preview_8.png) |
| 16 | 66 | [Download](16/dataset.zip) | ![preview 1](16/preview_1.png) | ![preview 2](16/preview_2.png) | ![preview 3](16/preview_3.png) | ![preview 4](16/preview_4.png) | ![preview 5](16/preview_5.png) | ![preview 6](16/preview_6.png) | ![preview 7](16/preview_7.png) | ![preview 8](16/preview_8.png) |
| 17 | 72 | [Download](17/dataset.zip) | ![preview 1](17/preview_1.png) | ![preview 2](17/preview_2.png) | ![preview 3](17/preview_3.png) | ![preview 4](17/preview_4.png) | ![preview 5](17/preview_5.png) | ![preview 6](17/preview_6.png) | ![preview 7](17/preview_7.png) | ![preview 8](17/preview_8.png) |
| 18 | 609 | [Download](18/dataset.zip) | ![preview 1](18/preview_1.png) | ![preview 2](18/preview_2.png) | ![preview 3](18/preview_3.png) | ![preview 4](18/preview_4.png) | ![preview 5](18/preview_5.png) | ![preview 6](18/preview_6.png) | ![preview 7](18/preview_7.png) | ![preview 8](18/preview_8.png) |
| noise | 157 | [Download](-1/dataset.zip) | ![preview 1](-1/preview_1.png) | ![preview 2](-1/preview_2.png) | ![preview 3](-1/preview_3.png) | ![preview 4](-1/preview_4.png) | ![preview 5](-1/preview_5.png) | ![preview 6](-1/preview_6.png) | ![preview 7](-1/preview_7.png) | ![preview 8](-1/preview_8.png) |
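Since the card recommends preprocessing the downloaded archives before training, here is a minimal sketch of fetching and unpacking one character's archive; it assumes the zip paths shown in the table above and the `BangumiBase/plasticmemories` repo id from this record:

```python
from zipfile import ZipFile
from huggingface_hub import hf_hub_download

# Download the archive for character 0 (use "all.zip" for the full base)
# from the BangumiBase/plasticmemories dataset repo, then unpack it.
archive = hf_hub_download(
    repo_id="BangumiBase/plasticmemories",
    filename="0/dataset.zip",
    repo_type="dataset",
)
with ZipFile(archive) as zf:
    zf.extractall("plastic_memories/char0")
# Roughly 1% of images may be noisy, so review the extracted folder
# before training.
```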
BangumiBase/plasticmemories
[ "size_categories:1K<n<10K", "license:mit", "art", "region:us" ]
2024-02-02T06:23:57+00:00
{"license": "mit", "size_categories": ["1K<n<10K"], "tags": ["art"]}
2024-02-02T07:37:59+00:00
[]
[]
TAGS #size_categories-1K<n<10K #license-mit #art #region-us
Bangumi Image Base of Plastic Memories ====================================== This is the image base of bangumi Plastic Memories. We detected 20 characters and 2364 images in total. The full dataset is here. Please note that these image bases are not guaranteed to be 100% cleaned; they may actually be noisy. If you intend to manually train models using this dataset, we recommend performing the necessary preprocessing on the downloaded dataset to eliminate potential noisy samples (approximately 1% probability). Here is the characters' preview:
[]
[ "TAGS\n#size_categories-1K<n<10K #license-mit #art #region-us \n" ]
61aef94d01f059786cd908a2c73e7f42c8767049
# Dataset Card for Evaluation run of xriminact/TarsDolly <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [xriminact/TarsDolly](https://huggingface.co/xriminact/TarsDolly) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_xriminact__TarsDolly", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-02-02T06:25:30.533704](https://huggingface.co/datasets/open-llm-leaderboard/details_xriminact__TarsDolly/blob/main/results_2024-02-02T06-25-30.533704.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.559209839212876, "acc_stderr": 0.03381204531755427, "acc_norm": 0.5674682510835656, "acc_norm_stderr": 0.034594842292962974, "mc1": 0.28518971848225216, "mc1_stderr": 0.015805827874454892, "mc2": 0.4229222915105855, "mc2_stderr": 0.014867729775892005 }, "harness|arc:challenge|25": { "acc": 0.5520477815699659, "acc_stderr": 0.014532011498211672, "acc_norm": 0.5930034129692833, "acc_norm_stderr": 0.014356399418009128 }, "harness|hellaswag|10": { "acc": 0.6265684126667994, "acc_stderr": 0.004827266662144028, "acc_norm": 0.8184624576777534, "acc_norm_stderr": 0.0038467514306295414 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.4888888888888889, "acc_stderr": 0.04318275491977976, "acc_norm": 0.4888888888888889, "acc_norm_stderr": 0.04318275491977976 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.5657894736842105, "acc_stderr": 0.04033565667848319, "acc_norm": 0.5657894736842105, "acc_norm_stderr": 0.04033565667848319 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6226415094339622, "acc_stderr": 0.029832808114796005, "acc_norm": 0.6226415094339622, "acc_norm_stderr": 0.029832808114796005 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.6597222222222222, "acc_stderr": 0.039621355734862175, "acc_norm": 0.6597222222222222, "acc_norm_stderr": 0.039621355734862175 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.41, "acc_stderr": 0.049431107042371025, "acc_norm": 0.41, "acc_norm_stderr": 0.049431107042371025 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.43, "acc_stderr": 0.049756985195624284, "acc_norm": 0.43, "acc_norm_stderr": 0.049756985195624284 },
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.5028901734104047, "acc_stderr": 0.038124005659748335, "acc_norm": 0.5028901734104047, "acc_norm_stderr": 0.038124005659748335 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.3333333333333333, "acc_stderr": 0.04690650298201942, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.04690650298201942 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.67, "acc_stderr": 0.04725815626252607, "acc_norm": 0.67, "acc_norm_stderr": 0.04725815626252607 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5234042553191489, "acc_stderr": 0.03265019475033582, "acc_norm": 0.5234042553191489, "acc_norm_stderr": 0.03265019475033582 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.41228070175438597, "acc_stderr": 0.046306532033665956, "acc_norm": 0.41228070175438597, "acc_norm_stderr": 0.046306532033665956 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5586206896551724, "acc_stderr": 0.04137931034482758, "acc_norm": 0.5586206896551724, "acc_norm_stderr": 0.04137931034482758 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.3941798941798942, "acc_stderr": 0.02516798233389414, "acc_norm": 0.3941798941798942, "acc_norm_stderr": 0.02516798233389414 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.3412698412698413, "acc_stderr": 0.04240799327574925, "acc_norm": 0.3412698412698413, "acc_norm_stderr": 0.04240799327574925 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.28, "acc_stderr": 0.045126085985421276, "acc_norm": 0.28, "acc_norm_stderr": 0.045126085985421276 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.6774193548387096, "acc_stderr": 0.02659308451657226, "acc_norm": 0.6774193548387096, "acc_norm_stderr": 0.02659308451657226 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.45320197044334976, "acc_stderr": 0.03502544650845872, "acc_norm": 0.45320197044334976, "acc_norm_stderr": 0.03502544650845872 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.59, "acc_stderr": 0.04943110704237102, "acc_norm": 0.59, "acc_norm_stderr": 0.04943110704237102 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.6727272727272727, "acc_stderr": 0.03663974994391244, "acc_norm": 0.6727272727272727, "acc_norm_stderr": 0.03663974994391244 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.6767676767676768, "acc_stderr": 0.03332299921070646, "acc_norm": 0.6767676767676768, "acc_norm_stderr": 0.03332299921070646 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.7772020725388601, "acc_stderr": 0.03003114797764154, "acc_norm": 0.7772020725388601, "acc_norm_stderr": 0.03003114797764154 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.5692307692307692, "acc_stderr": 0.025106820660539753, "acc_norm": 0.5692307692307692, "acc_norm_stderr": 0.025106820660539753 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.2962962962962963, "acc_stderr": 0.027840811495871927, "acc_norm": 0.2962962962962963, "acc_norm_stderr": 0.027840811495871927 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6302521008403361, "acc_stderr": 0.03135709599613591, "acc_norm": 0.6302521008403361, "acc_norm_stderr": 0.03135709599613591 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3509933774834437, "acc_stderr": 
0.03896981964257375, "acc_norm": 0.3509933774834437, "acc_norm_stderr": 0.03896981964257375 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.6788990825688074, "acc_stderr": 0.02001814977273375, "acc_norm": 0.6788990825688074, "acc_norm_stderr": 0.02001814977273375 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4537037037037037, "acc_stderr": 0.03395322726375797, "acc_norm": 0.4537037037037037, "acc_norm_stderr": 0.03395322726375797 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7303921568627451, "acc_stderr": 0.031145570659486782, "acc_norm": 0.7303921568627451, "acc_norm_stderr": 0.031145570659486782 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7088607594936709, "acc_stderr": 0.029571601065753374, "acc_norm": 0.7088607594936709, "acc_norm_stderr": 0.029571601065753374 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6457399103139013, "acc_stderr": 0.03210062154134986, "acc_norm": 0.6457399103139013, "acc_norm_stderr": 0.03210062154134986 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.6641221374045801, "acc_stderr": 0.041423137719966634, "acc_norm": 0.6641221374045801, "acc_norm_stderr": 0.041423137719966634 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7024793388429752, "acc_stderr": 0.04173349148083499, "acc_norm": 0.7024793388429752, "acc_norm_stderr": 0.04173349148083499 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.6851851851851852, "acc_stderr": 0.04489931073591312, "acc_norm": 0.6851851851851852, "acc_norm_stderr": 0.04489931073591312 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.6748466257668712, "acc_stderr": 0.036803503712864616, "acc_norm": 0.6748466257668712, "acc_norm_stderr": 0.036803503712864616 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.375, "acc_stderr": 0.04595091388086298, "acc_norm": 0.375, "acc_norm_stderr": 0.04595091388086298 }, "harness|hendrycksTest-management|5": { "acc": 0.7669902912621359, "acc_stderr": 0.04185832598928315, "acc_norm": 0.7669902912621359, "acc_norm_stderr": 0.04185832598928315 }, "harness|hendrycksTest-marketing|5": { "acc": 0.782051282051282, "acc_stderr": 0.02704685763071668, "acc_norm": 0.782051282051282, "acc_norm_stderr": 0.02704685763071668 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.65, "acc_stderr": 0.0479372485441102, "acc_norm": 0.65, "acc_norm_stderr": 0.0479372485441102 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7547892720306514, "acc_stderr": 0.015384352284543937, "acc_norm": 0.7547892720306514, "acc_norm_stderr": 0.015384352284543937 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6040462427745664, "acc_stderr": 0.02632981334194624, "acc_norm": 0.6040462427745664, "acc_norm_stderr": 0.02632981334194624 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.31843575418994413, "acc_stderr": 0.015581008080360276, "acc_norm": 0.31843575418994413, "acc_norm_stderr": 0.015581008080360276 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6045751633986928, "acc_stderr": 0.02799672318063145, "acc_norm": 0.6045751633986928, "acc_norm_stderr": 0.02799672318063145 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6366559485530546, "acc_stderr": 0.027316847674192707, "acc_norm": 0.6366559485530546, "acc_norm_stderr": 0.027316847674192707 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.6141975308641975, "acc_stderr": 0.027085401226132143, "acc_norm": 0.6141975308641975, "acc_norm_stderr": 0.027085401226132143 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 
0.4326241134751773, "acc_stderr": 0.029555454236778845, "acc_norm": 0.4326241134751773, "acc_norm_stderr": 0.029555454236778845 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4282920469361147, "acc_stderr": 0.012638223880313172, "acc_norm": 0.4282920469361147, "acc_norm_stderr": 0.012638223880313172 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5735294117647058, "acc_stderr": 0.03004261583271487, "acc_norm": 0.5735294117647058, "acc_norm_stderr": 0.03004261583271487 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.5555555555555556, "acc_stderr": 0.020102583895887184, "acc_norm": 0.5555555555555556, "acc_norm_stderr": 0.020102583895887184 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.5818181818181818, "acc_stderr": 0.047245774057315726, "acc_norm": 0.5818181818181818, "acc_norm_stderr": 0.047245774057315726 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.5836734693877551, "acc_stderr": 0.03155782816556165, "acc_norm": 0.5836734693877551, "acc_norm_stderr": 0.03155782816556165 }, "harness|hendrycksTest-sociology|5": { "acc": 0.7711442786069652, "acc_stderr": 0.029705284056772436, "acc_norm": 0.7711442786069652, "acc_norm_stderr": 0.029705284056772436 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.82, "acc_stderr": 0.03861229196653694, "acc_norm": 0.82, "acc_norm_stderr": 0.03861229196653694 }, "harness|hendrycksTest-virology|5": { "acc": 0.4759036144578313, "acc_stderr": 0.03887971849597264, "acc_norm": 0.4759036144578313, "acc_norm_stderr": 0.03887971849597264 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7485380116959064, "acc_stderr": 0.033275044238468436, "acc_norm": 0.7485380116959064, "acc_norm_stderr": 0.033275044238468436 }, "harness|truthfulqa:mc|0": { "mc1": 0.28518971848225216, "mc1_stderr": 0.015805827874454892, "mc2": 0.4229222915105855, "mc2_stderr": 0.014867729775892005 }, "harness|winogrande|5": { "acc": 0.7624309392265194, "acc_stderr": 0.011961298905803152 }, "harness|gsm8k|5": { "acc": 0.10159211523881728, "acc_stderr": 0.008321642868474836 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
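The card above notes that the "results" configuration aggregates every task's metrics for the run. A short sketch of reading that configuration, assuming it exposes the same timestamped/"latest" split pair as the other evaluation runs in this dump:

```python
from datasets import load_dataset

# Read the aggregated metrics for the TarsDolly run; the "results"
# configuration collects every task's scores, and "latest" is assumed to
# track the most recent results parquet, as in the other runs in this dump.
results = load_dataset(
    "open-llm-leaderboard/details_xriminact__TarsDolly",
    "results",
    split="latest",
)
print(results[0])  # e.g. the "all" block with acc / acc_norm / mc1 / mc2
```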
open-llm-leaderboard/details_xriminact__TarsDolly
[ "region:us" ]
2024-02-02T06:27:50+00:00
{"pretty_name": "Evaluation run of xriminact/TarsDolly", "dataset_summary": "Dataset automatically created during the evaluation run of model [xriminact/TarsDolly](https://huggingface.co/xriminact/TarsDolly) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_xriminact__TarsDolly\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-02T06:25:30.533704](https://huggingface.co/datasets/open-llm-leaderboard/details_xriminact__TarsDolly/blob/main/results_2024-02-02T06-25-30.533704.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.559209839212876,\n \"acc_stderr\": 0.03381204531755427,\n \"acc_norm\": 0.5674682510835656,\n \"acc_norm_stderr\": 0.034594842292962974,\n \"mc1\": 0.28518971848225216,\n \"mc1_stderr\": 0.015805827874454892,\n \"mc2\": 0.4229222915105855,\n \"mc2_stderr\": 0.014867729775892005\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5520477815699659,\n \"acc_stderr\": 0.014532011498211672,\n \"acc_norm\": 0.5930034129692833,\n \"acc_norm_stderr\": 0.014356399418009128\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6265684126667994,\n \"acc_stderr\": 0.004827266662144028,\n \"acc_norm\": 0.8184624576777534,\n \"acc_norm_stderr\": 0.0038467514306295414\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.046882617226215034,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.046882617226215034\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.4888888888888889,\n \"acc_stderr\": 0.04318275491977976,\n \"acc_norm\": 0.4888888888888889,\n \"acc_norm_stderr\": 0.04318275491977976\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.5657894736842105,\n \"acc_stderr\": 0.04033565667848319,\n \"acc_norm\": 0.5657894736842105,\n \"acc_norm_stderr\": 0.04033565667848319\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.48,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.48,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6226415094339622,\n \"acc_stderr\": 0.029832808114796005,\n \"acc_norm\": 0.6226415094339622,\n \"acc_norm_stderr\": 0.029832808114796005\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.6597222222222222,\n \"acc_stderr\": 0.039621355734862175,\n \"acc_norm\": 0.6597222222222222,\n \"acc_norm_stderr\": 0.039621355734862175\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.41,\n \"acc_stderr\": 0.049431107042371025,\n 
\"acc_norm\": 0.41,\n \"acc_norm_stderr\": 0.049431107042371025\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.43,\n \"acc_stderr\": 0.049756985195624284,\n \"acc_norm\": 0.43,\n \"acc_norm_stderr\": 0.049756985195624284\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.36,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5028901734104047,\n \"acc_stderr\": 0.038124005659748335,\n \"acc_norm\": 0.5028901734104047,\n \"acc_norm_stderr\": 0.038124005659748335\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.3333333333333333,\n \"acc_stderr\": 0.04690650298201942,\n \"acc_norm\": 0.3333333333333333,\n \"acc_norm_stderr\": 0.04690650298201942\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.67,\n \"acc_stderr\": 0.04725815626252607,\n \"acc_norm\": 0.67,\n \"acc_norm_stderr\": 0.04725815626252607\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5234042553191489,\n \"acc_stderr\": 0.03265019475033582,\n \"acc_norm\": 0.5234042553191489,\n \"acc_norm_stderr\": 0.03265019475033582\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.41228070175438597,\n \"acc_stderr\": 0.046306532033665956,\n \"acc_norm\": 0.41228070175438597,\n \"acc_norm_stderr\": 0.046306532033665956\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5586206896551724,\n \"acc_stderr\": 0.04137931034482758,\n \"acc_norm\": 0.5586206896551724,\n \"acc_norm_stderr\": 0.04137931034482758\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.3941798941798942,\n \"acc_stderr\": 0.02516798233389414,\n \"acc_norm\": 0.3941798941798942,\n \"acc_norm_stderr\": 0.02516798233389414\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.3412698412698413,\n \"acc_stderr\": 0.04240799327574925,\n \"acc_norm\": 0.3412698412698413,\n \"acc_norm_stderr\": 0.04240799327574925\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.045126085985421276,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.045126085985421276\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.6774193548387096,\n \"acc_stderr\": 0.02659308451657226,\n \"acc_norm\": 0.6774193548387096,\n \"acc_norm_stderr\": 0.02659308451657226\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.45320197044334976,\n \"acc_stderr\": 0.03502544650845872,\n \"acc_norm\": 0.45320197044334976,\n \"acc_norm_stderr\": 0.03502544650845872\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.59,\n \"acc_stderr\": 0.04943110704237102,\n \"acc_norm\": 0.59,\n \"acc_norm_stderr\": 0.04943110704237102\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.6727272727272727,\n \"acc_stderr\": 0.03663974994391244,\n \"acc_norm\": 0.6727272727272727,\n \"acc_norm_stderr\": 0.03663974994391244\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.6767676767676768,\n \"acc_stderr\": 0.03332299921070646,\n \"acc_norm\": 0.6767676767676768,\n \"acc_norm_stderr\": 0.03332299921070646\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.7772020725388601,\n \"acc_stderr\": 0.03003114797764154,\n \"acc_norm\": 0.7772020725388601,\n \"acc_norm_stderr\": 0.03003114797764154\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 
0.5692307692307692,\n \"acc_stderr\": 0.025106820660539753,\n \"acc_norm\": 0.5692307692307692,\n \"acc_norm_stderr\": 0.025106820660539753\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.2962962962962963,\n \"acc_stderr\": 0.027840811495871927,\n \"acc_norm\": 0.2962962962962963,\n \"acc_norm_stderr\": 0.027840811495871927\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6302521008403361,\n \"acc_stderr\": 0.03135709599613591,\n \"acc_norm\": 0.6302521008403361,\n \"acc_norm_stderr\": 0.03135709599613591\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3509933774834437,\n \"acc_stderr\": 0.03896981964257375,\n \"acc_norm\": 0.3509933774834437,\n \"acc_norm_stderr\": 0.03896981964257375\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.6788990825688074,\n \"acc_stderr\": 0.02001814977273375,\n \"acc_norm\": 0.6788990825688074,\n \"acc_norm_stderr\": 0.02001814977273375\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.4537037037037037,\n \"acc_stderr\": 0.03395322726375797,\n \"acc_norm\": 0.4537037037037037,\n \"acc_norm_stderr\": 0.03395322726375797\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7303921568627451,\n \"acc_stderr\": 0.031145570659486782,\n \"acc_norm\": 0.7303921568627451,\n \"acc_norm_stderr\": 0.031145570659486782\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7088607594936709,\n \"acc_stderr\": 0.029571601065753374,\n \"acc_norm\": 0.7088607594936709,\n \"acc_norm_stderr\": 0.029571601065753374\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6457399103139013,\n \"acc_stderr\": 0.03210062154134986,\n \"acc_norm\": 0.6457399103139013,\n \"acc_norm_stderr\": 0.03210062154134986\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.6641221374045801,\n \"acc_stderr\": 0.041423137719966634,\n \"acc_norm\": 0.6641221374045801,\n \"acc_norm_stderr\": 0.041423137719966634\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7024793388429752,\n \"acc_stderr\": 0.04173349148083499,\n \"acc_norm\": 0.7024793388429752,\n \"acc_norm_stderr\": 0.04173349148083499\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.6851851851851852,\n \"acc_stderr\": 0.04489931073591312,\n \"acc_norm\": 0.6851851851851852,\n \"acc_norm_stderr\": 0.04489931073591312\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.6748466257668712,\n \"acc_stderr\": 0.036803503712864616,\n \"acc_norm\": 0.6748466257668712,\n \"acc_norm_stderr\": 0.036803503712864616\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.375,\n \"acc_stderr\": 0.04595091388086298,\n \"acc_norm\": 0.375,\n \"acc_norm_stderr\": 0.04595091388086298\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7669902912621359,\n \"acc_stderr\": 0.04185832598928315,\n \"acc_norm\": 0.7669902912621359,\n \"acc_norm_stderr\": 0.04185832598928315\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.782051282051282,\n \"acc_stderr\": 0.02704685763071668,\n \"acc_norm\": 0.782051282051282,\n \"acc_norm_stderr\": 0.02704685763071668\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.65,\n \"acc_stderr\": 0.0479372485441102,\n \"acc_norm\": 0.65,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7547892720306514,\n \"acc_stderr\": 0.015384352284543937,\n \"acc_norm\": 0.7547892720306514,\n 
\"acc_norm_stderr\": 0.015384352284543937\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.6040462427745664,\n \"acc_stderr\": 0.02632981334194624,\n \"acc_norm\": 0.6040462427745664,\n \"acc_norm_stderr\": 0.02632981334194624\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.31843575418994413,\n \"acc_stderr\": 0.015581008080360276,\n \"acc_norm\": 0.31843575418994413,\n \"acc_norm_stderr\": 0.015581008080360276\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.6045751633986928,\n \"acc_stderr\": 0.02799672318063145,\n \"acc_norm\": 0.6045751633986928,\n \"acc_norm_stderr\": 0.02799672318063145\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6366559485530546,\n \"acc_stderr\": 0.027316847674192707,\n \"acc_norm\": 0.6366559485530546,\n \"acc_norm_stderr\": 0.027316847674192707\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.6141975308641975,\n \"acc_stderr\": 0.027085401226132143,\n \"acc_norm\": 0.6141975308641975,\n \"acc_norm_stderr\": 0.027085401226132143\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4326241134751773,\n \"acc_stderr\": 0.029555454236778845,\n \"acc_norm\": 0.4326241134751773,\n \"acc_norm_stderr\": 0.029555454236778845\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4282920469361147,\n \"acc_stderr\": 0.012638223880313172,\n \"acc_norm\": 0.4282920469361147,\n \"acc_norm_stderr\": 0.012638223880313172\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.5735294117647058,\n \"acc_stderr\": 0.03004261583271487,\n \"acc_norm\": 0.5735294117647058,\n \"acc_norm_stderr\": 0.03004261583271487\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.5555555555555556,\n \"acc_stderr\": 0.020102583895887184,\n \"acc_norm\": 0.5555555555555556,\n \"acc_norm_stderr\": 0.020102583895887184\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.5818181818181818,\n \"acc_stderr\": 0.047245774057315726,\n \"acc_norm\": 0.5818181818181818,\n \"acc_norm_stderr\": 0.047245774057315726\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.5836734693877551,\n \"acc_stderr\": 0.03155782816556165,\n \"acc_norm\": 0.5836734693877551,\n \"acc_norm_stderr\": 0.03155782816556165\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.7711442786069652,\n \"acc_stderr\": 0.029705284056772436,\n \"acc_norm\": 0.7711442786069652,\n \"acc_norm_stderr\": 0.029705284056772436\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.82,\n \"acc_stderr\": 0.03861229196653694,\n \"acc_norm\": 0.82,\n \"acc_norm_stderr\": 0.03861229196653694\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.4759036144578313,\n \"acc_stderr\": 0.03887971849597264,\n \"acc_norm\": 0.4759036144578313,\n \"acc_norm_stderr\": 0.03887971849597264\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.7485380116959064,\n \"acc_stderr\": 0.033275044238468436,\n \"acc_norm\": 0.7485380116959064,\n \"acc_norm_stderr\": 0.033275044238468436\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.28518971848225216,\n \"mc1_stderr\": 0.015805827874454892,\n \"mc2\": 0.4229222915105855,\n \"mc2_stderr\": 0.014867729775892005\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7624309392265194,\n \"acc_stderr\": 0.011961298905803152\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.10159211523881728,\n \"acc_stderr\": 0.008321642868474836\n }\n}\n```", "repo_url": "https://huggingface.co/xriminact/TarsDolly", 
"leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_02T06_25_30.533704", "path": ["**/details_harness|arc:challenge|25_2024-02-02T06-25-30.533704.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-02T06-25-30.533704.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_02T06_25_30.533704", "path": ["**/details_harness|gsm8k|5_2024-02-02T06-25-30.533704.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-02T06-25-30.533704.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_02T06_25_30.533704", "path": ["**/details_harness|hellaswag|10_2024-02-02T06-25-30.533704.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-02T06-25-30.533704.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_02T06_25_30.533704", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T06-25-30.533704.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T06-25-30.533704.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T06-25-30.533704.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-02T06-25-30.533704.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T06-25-30.533704.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T06-25-30.533704.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_02T06_25_30.533704", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T06-25-30.533704.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T06-25-30.533704.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_02T06_25_30.533704", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T06-25-30.533704.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T06-25-30.533704.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_02T06_25_30.533704", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T06-25-30.533704.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T06-25-30.533704.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_02T06_25_30.533704", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T06-25-30.533704.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T06-25-30.533704.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_02T06_25_30.533704", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T06-25-30.533704.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T06-25-30.533704.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_02T06_25_30.533704", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T06-25-30.533704.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T06-25-30.533704.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_02T06_25_30.533704", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T06-25-30.533704.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T06-25-30.533704.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_02_02T06_25_30.533704", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T06-25-30.533704.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T06-25-30.533704.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_02T06_25_30.533704", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T06-25-30.533704.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T06-25-30.533704.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_02T06_25_30.533704", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T06-25-30.533704.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T06-25-30.533704.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_02T06_25_30.533704", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T06-25-30.533704.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T06-25-30.533704.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_02T06_25_30.533704", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T06-25-30.533704.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T06-25-30.533704.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_02T06_25_30.533704", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T06-25-30.533704.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T06-25-30.533704.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_02T06_25_30.533704", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T06-25-30.533704.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T06-25-30.533704.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_02T06_25_30.533704", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T06-25-30.533704.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T06-25-30.533704.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_02T06_25_30.533704", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T06-25-30.533704.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T06-25-30.533704.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_02T06_25_30.533704", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T06-25-30.533704.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T06-25-30.533704.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_02T06_25_30.533704", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T06-25-30.533704.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T06-25-30.533704.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_02T06_25_30.533704", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T06-25-30.533704.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T06-25-30.533704.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_02T06_25_30.533704", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T06-25-30.533704.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T06-25-30.533704.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_02T06_25_30.533704", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T06-25-30.533704.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T06-25-30.533704.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_02T06_25_30.533704", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T06-25-30.533704.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T06-25-30.533704.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_02T06_25_30.533704", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T06-25-30.533704.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T06-25-30.533704.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_02T06_25_30.533704", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T06-25-30.533704.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T06-25-30.533704.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_02T06_25_30.533704", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T06-25-30.533704.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T06-25-30.533704.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_02T06_25_30.533704", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T06-25-30.533704.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T06-25-30.533704.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_02T06_25_30.533704", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T06-25-30.533704.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T06-25-30.533704.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_02T06_25_30.533704", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T06-25-30.533704.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T06-25-30.533704.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_02T06_25_30.533704", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T06-25-30.533704.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T06-25-30.533704.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_02T06_25_30.533704", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T06-25-30.533704.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T06-25-30.533704.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_02T06_25_30.533704", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T06-25-30.533704.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T06-25-30.533704.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_02T06_25_30.533704", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T06-25-30.533704.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T06-25-30.533704.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_02T06_25_30.533704", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T06-25-30.533704.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T06-25-30.533704.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_02T06_25_30.533704", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T06-25-30.533704.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T06-25-30.533704.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_02T06_25_30.533704", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T06-25-30.533704.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T06-25-30.533704.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_02T06_25_30.533704", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T06-25-30.533704.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T06-25-30.533704.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_02T06_25_30.533704", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T06-25-30.533704.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T06-25-30.533704.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_02_02T06_25_30.533704", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T06-25-30.533704.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T06-25-30.533704.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_02T06_25_30.533704", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T06-25-30.533704.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T06-25-30.533704.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_02T06_25_30.533704", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T06-25-30.533704.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T06-25-30.533704.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_02T06_25_30.533704", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T06-25-30.533704.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T06-25-30.533704.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_02T06_25_30.533704", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T06-25-30.533704.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T06-25-30.533704.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_02T06_25_30.533704", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T06-25-30.533704.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T06-25-30.533704.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_02T06_25_30.533704", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T06-25-30.533704.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T06-25-30.533704.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_02T06_25_30.533704", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T06-25-30.533704.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T06-25-30.533704.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_02T06_25_30.533704", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T06-25-30.533704.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T06-25-30.533704.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_02T06_25_30.533704", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T06-25-30.533704.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T06-25-30.533704.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_02T06_25_30.533704", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T06-25-30.533704.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T06-25-30.533704.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_02T06_25_30.533704", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T06-25-30.533704.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T06-25-30.533704.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_02T06_25_30.533704", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T06-25-30.533704.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T06-25-30.533704.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_02T06_25_30.533704", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T06-25-30.533704.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T06-25-30.533704.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_02T06_25_30.533704", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T06-25-30.533704.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T06-25-30.533704.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_02T06_25_30.533704", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T06-25-30.533704.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T06-25-30.533704.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_02T06_25_30.533704", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T06-25-30.533704.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T06-25-30.533704.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_02T06_25_30.533704", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T06-25-30.533704.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T06-25-30.533704.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_02T06_25_30.533704", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T06-25-30.533704.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T06-25-30.533704.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_02T06_25_30.533704", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T06-25-30.533704.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T06-25-30.533704.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_02T06_25_30.533704", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T06-25-30.533704.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T06-25-30.533704.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_02T06_25_30.533704", "path": ["**/details_harness|winogrande|5_2024-02-02T06-25-30.533704.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-02-02T06-25-30.533704.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_02_02T06_25_30.533704", "path": ["results_2024-02-02T06-25-30.533704.parquet"]}, {"split": "latest", "path": 
["results_2024-02-02T06-25-30.533704.parquet"]}]}]}
2024-02-02T06:28:13+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of xriminact/TarsDolly Dataset automatically created during the evaluation run of model xriminact/TarsDolly on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following (see the snippet below): ## Latest results These are the latest results from run 2024-02-02T06:25:30.533704 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases, and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
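The loading snippet referenced above ("you can for instance do the following"), as given verbatim in this record's metadata, is:

```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_xriminact__TarsDolly",
	"harness_winogrande_5",
	split="train")
```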
[ "# Dataset Card for Evaluation run of xriminact/TarsDolly\n\n\n\nDataset automatically created during the evaluation run of model xriminact/TarsDolly on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T06:25:30.533704(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of xriminact/TarsDolly\n\n\n\nDataset automatically created during the evaluation run of model xriminact/TarsDolly on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T06:25:30.533704(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
56dc61ddcde735d24d162ff94f469b56078fa410
# Dataset Card for Evaluation run of codellama/CodeLlama-70b-hf <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [codellama/CodeLlama-70b-hf](https://huggingface.co/codellama/CodeLlama-70b-hf) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_codellama__CodeLlama-70b-hf", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-02-02T06:27:09.209983](https://huggingface.co/datasets/open-llm-leaderboard/details_codellama__CodeLlama-70b-hf/blob/main/results_2024-02-02T06-27-09.209983.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.5954881773778198, "acc_stderr": 0.03341128708368595, "acc_norm": 0.5993131783154683, "acc_norm_stderr": 0.0340914669738772, "mc1": 0.2607099143206854, "mc1_stderr": 0.015368841620766373, "mc2": 0.39788477413004975, "mc2_stderr": 0.014288917719366868 }, "harness|arc:challenge|25": { "acc": 0.5426621160409556, "acc_stderr": 0.014558106543924058, "acc_norm": 0.5674061433447098, "acc_norm_stderr": 0.01447800569418253 }, "harness|hellaswag|10": { "acc": 0.5802628958374826, "acc_stderr": 0.004925072159723829, "acc_norm": 0.7821151165106552, "acc_norm_stderr": 0.004119650817714288 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.35, "acc_stderr": 0.0479372485441102, "acc_norm": 0.35, "acc_norm_stderr": 0.0479372485441102 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.48148148148148145, "acc_stderr": 0.043163785995113245, "acc_norm": 0.48148148148148145, "acc_norm_stderr": 0.043163785995113245 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6052631578947368, "acc_stderr": 0.039777499346220734, "acc_norm": 0.6052631578947368, "acc_norm_stderr": 0.039777499346220734 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.64, "acc_stderr": 0.048241815132442176, "acc_norm": 0.64, "acc_norm_stderr": 0.048241815132442176 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.5962264150943396, "acc_stderr": 0.030197611600197946, "acc_norm": 0.5962264150943396, "acc_norm_stderr": 0.030197611600197946 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.5625, "acc_stderr": 0.04148415739394154, "acc_norm": 0.5625, "acc_norm_stderr": 0.04148415739394154 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.33, "acc_stderr": 0.04725815626252605, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252605 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.56, "acc_stderr": 0.049888765156985884, "acc_norm": 0.56, "acc_norm_stderr": 0.049888765156985884 }, 
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.5375722543352601, "acc_stderr": 0.0380168510452446, "acc_norm": 0.5375722543352601, "acc_norm_stderr": 0.0380168510452446 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.35294117647058826, "acc_stderr": 0.04755129616062948, "acc_norm": 0.35294117647058826, "acc_norm_stderr": 0.04755129616062948 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.79, "acc_stderr": 0.040936018074033256, "acc_norm": 0.79, "acc_norm_stderr": 0.040936018074033256 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5659574468085107, "acc_stderr": 0.03240038086792747, "acc_norm": 0.5659574468085107, "acc_norm_stderr": 0.03240038086792747 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.45614035087719296, "acc_stderr": 0.04685473041907789, "acc_norm": 0.45614035087719296, "acc_norm_stderr": 0.04685473041907789 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5241379310344828, "acc_stderr": 0.041618085035015295, "acc_norm": 0.5241379310344828, "acc_norm_stderr": 0.041618085035015295 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.4021164021164021, "acc_stderr": 0.025253032554997692, "acc_norm": 0.4021164021164021, "acc_norm_stderr": 0.025253032554997692 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.47619047619047616, "acc_stderr": 0.04467062628403273, "acc_norm": 0.47619047619047616, "acc_norm_stderr": 0.04467062628403273 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.37, "acc_stderr": 0.048523658709391, "acc_norm": 0.37, "acc_norm_stderr": 0.048523658709391 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.667741935483871, "acc_stderr": 0.0267955608481228, "acc_norm": 0.667741935483871, "acc_norm_stderr": 0.0267955608481228 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.4236453201970443, "acc_stderr": 0.03476725747649037, "acc_norm": 0.4236453201970443, "acc_norm_stderr": 0.03476725747649037 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.73, "acc_stderr": 0.044619604333847394, "acc_norm": 0.73, "acc_norm_stderr": 0.044619604333847394 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7454545454545455, "acc_stderr": 0.03401506715249039, "acc_norm": 0.7454545454545455, "acc_norm_stderr": 0.03401506715249039 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7424242424242424, "acc_stderr": 0.03115626951964683, "acc_norm": 0.7424242424242424, "acc_norm_stderr": 0.03115626951964683 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8082901554404145, "acc_stderr": 0.02840895362624528, "acc_norm": 0.8082901554404145, "acc_norm_stderr": 0.02840895362624528 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.5846153846153846, "acc_stderr": 0.024985354923102325, "acc_norm": 0.5846153846153846, "acc_norm_stderr": 0.024985354923102325 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.337037037037037, "acc_stderr": 0.028820884666253255, "acc_norm": 0.337037037037037, "acc_norm_stderr": 0.028820884666253255 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6092436974789915, "acc_stderr": 0.031693802357129965, "acc_norm": 0.6092436974789915, "acc_norm_stderr": 0.031693802357129965 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.41721854304635764, "acc_stderr": 0.04026141497634611, 
"acc_norm": 0.41721854304635764, "acc_norm_stderr": 0.04026141497634611 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7596330275229358, "acc_stderr": 0.01832060732096407, "acc_norm": 0.7596330275229358, "acc_norm_stderr": 0.01832060732096407 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.47685185185185186, "acc_stderr": 0.034063153607115065, "acc_norm": 0.47685185185185186, "acc_norm_stderr": 0.034063153607115065 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7647058823529411, "acc_stderr": 0.029771775228145635, "acc_norm": 0.7647058823529411, "acc_norm_stderr": 0.029771775228145635 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7890295358649789, "acc_stderr": 0.02655837250266192, "acc_norm": 0.7890295358649789, "acc_norm_stderr": 0.02655837250266192 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6547085201793722, "acc_stderr": 0.03191100192835794, "acc_norm": 0.6547085201793722, "acc_norm_stderr": 0.03191100192835794 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7099236641221374, "acc_stderr": 0.03980066246467766, "acc_norm": 0.7099236641221374, "acc_norm_stderr": 0.03980066246467766 }, "harness|hendrycksTest-international_law|5": { "acc": 0.743801652892562, "acc_stderr": 0.03984979653302872, "acc_norm": 0.743801652892562, "acc_norm_stderr": 0.03984979653302872 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7222222222222222, "acc_stderr": 0.043300437496507395, "acc_norm": 0.7222222222222222, "acc_norm_stderr": 0.043300437496507395 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7239263803680982, "acc_stderr": 0.035123852837050475, "acc_norm": 0.7239263803680982, "acc_norm_stderr": 0.035123852837050475 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.5, "acc_stderr": 0.04745789978762494, "acc_norm": 0.5, "acc_norm_stderr": 0.04745789978762494 }, "harness|hendrycksTest-management|5": { "acc": 0.7572815533980582, "acc_stderr": 0.042450224863844956, "acc_norm": 0.7572815533980582, "acc_norm_stderr": 0.042450224863844956 }, "harness|hendrycksTest-marketing|5": { "acc": 0.811965811965812, "acc_stderr": 0.025598193686652254, "acc_norm": 0.811965811965812, "acc_norm_stderr": 0.025598193686652254 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.57, "acc_stderr": 0.049756985195624284, "acc_norm": 0.57, "acc_norm_stderr": 0.049756985195624284 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7407407407407407, "acc_stderr": 0.015671006009339572, "acc_norm": 0.7407407407407407, "acc_norm_stderr": 0.015671006009339572 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6502890173410405, "acc_stderr": 0.025674281456531015, "acc_norm": 0.6502890173410405, "acc_norm_stderr": 0.025674281456531015 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4223463687150838, "acc_stderr": 0.016519594275297117, "acc_norm": 0.4223463687150838, "acc_norm_stderr": 0.016519594275297117 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6241830065359477, "acc_stderr": 0.02773283435336393, "acc_norm": 0.6241830065359477, "acc_norm_stderr": 0.02773283435336393 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6816720257234726, "acc_stderr": 0.026457225067811025, "acc_norm": 0.6816720257234726, "acc_norm_stderr": 0.026457225067811025 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.6327160493827161, "acc_stderr": 0.026822801759507905, "acc_norm": 0.6327160493827161, "acc_norm_stderr": 0.026822801759507905 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 
0.450354609929078, "acc_stderr": 0.029680105565029036, "acc_norm": 0.450354609929078, "acc_norm_stderr": 0.029680105565029036 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.41590612777053454, "acc_stderr": 0.01258832385031361, "acc_norm": 0.41590612777053454, "acc_norm_stderr": 0.01258832385031361 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5220588235294118, "acc_stderr": 0.030343264224213514, "acc_norm": 0.5220588235294118, "acc_norm_stderr": 0.030343264224213514 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.5751633986928104, "acc_stderr": 0.019997973035458333, "acc_norm": 0.5751633986928104, "acc_norm_stderr": 0.019997973035458333 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6909090909090909, "acc_stderr": 0.044262946482000985, "acc_norm": 0.6909090909090909, "acc_norm_stderr": 0.044262946482000985 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.6979591836734694, "acc_stderr": 0.0293936093198798, "acc_norm": 0.6979591836734694, "acc_norm_stderr": 0.0293936093198798 }, "harness|hendrycksTest-sociology|5": { "acc": 0.7711442786069652, "acc_stderr": 0.029705284056772426, "acc_norm": 0.7711442786069652, "acc_norm_stderr": 0.029705284056772426 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.87, "acc_stderr": 0.0337997668989631, "acc_norm": 0.87, "acc_norm_stderr": 0.0337997668989631 }, "harness|hendrycksTest-virology|5": { "acc": 0.46987951807228917, "acc_stderr": 0.03885425420866766, "acc_norm": 0.46987951807228917, "acc_norm_stderr": 0.03885425420866766 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7543859649122807, "acc_stderr": 0.03301405946987249, "acc_norm": 0.7543859649122807, "acc_norm_stderr": 0.03301405946987249 }, "harness|truthfulqa:mc|0": { "mc1": 0.2607099143206854, "mc1_stderr": 0.015368841620766373, "mc2": 0.39788477413004975, "mc2_stderr": 0.014288917719366868 }, "harness|winogrande|5": { "acc": 0.7521704814522494, "acc_stderr": 0.01213438601986535 }, "harness|gsm8k|5": { "acc": 0.4397270659590599, "acc_stderr": 0.013672052434471577 } }
```

## Dataset Details

### Dataset Description

<!-- Provide a longer summary of what this dataset is. -->

- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]

### Dataset Sources [optional]

<!-- Provide the basic links for the dataset. -->

- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]

## Uses

<!-- Address questions around how the dataset is intended to be used. -->

### Direct Use

<!-- This section describes suitable use cases for the dataset. -->

[More Information Needed]

### Out-of-Scope Use

<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->

[More Information Needed]

## Dataset Structure

<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->

[More Information Needed]

## Dataset Creation

### Curation Rationale

<!-- Motivation for the creation of this dataset. -->

[More Information Needed]

### Source Data

<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->

#### Data Collection and Processing

<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->

[More Information Needed]

#### Who are the source data producers?

<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->

[More Information Needed]

### Annotations [optional]

<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->

#### Annotation process

<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->

[More Information Needed]

#### Who are the annotators?

<!-- This section describes the people or systems who created the annotations. -->

[More Information Needed]

#### Personal and Sensitive Information

<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->

[More Information Needed]

## Bias, Risks, and Limitations

<!-- This section is meant to convey both technical and sociotechnical limitations. -->

[More Information Needed]

### Recommendations

<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->

Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.

## Citation [optional]

<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->

**BibTeX:**

[More Information Needed]

**APA:**

[More Information Needed]

## Glossary [optional]

<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->

[More Information Needed]

## More Information [optional]

[More Information Needed]

## Dataset Card Authors [optional]

[More Information Needed]

## Dataset Card Contact

[More Information Needed]
open-llm-leaderboard/details_codellama__CodeLlama-70b-hf
[ "region:us" ]
2024-02-02T06:29:30+00:00
{"pretty_name": "Evaluation run of codellama/CodeLlama-70b-hf", "dataset_summary": "Dataset automatically created during the evaluation run of model [codellama/CodeLlama-70b-hf](https://huggingface.co/codellama/CodeLlama-70b-hf) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_codellama__CodeLlama-70b-hf\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-02T06:27:09.209983](https://huggingface.co/datasets/open-llm-leaderboard/details_codellama__CodeLlama-70b-hf/blob/main/results_2024-02-02T06-27-09.209983.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5954881773778198,\n \"acc_stderr\": 0.03341128708368595,\n \"acc_norm\": 0.5993131783154683,\n \"acc_norm_stderr\": 0.0340914669738772,\n \"mc1\": 0.2607099143206854,\n \"mc1_stderr\": 0.015368841620766373,\n \"mc2\": 0.39788477413004975,\n \"mc2_stderr\": 0.014288917719366868\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5426621160409556,\n \"acc_stderr\": 0.014558106543924058,\n \"acc_norm\": 0.5674061433447098,\n \"acc_norm_stderr\": 0.01447800569418253\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.5802628958374826,\n \"acc_stderr\": 0.004925072159723829,\n \"acc_norm\": 0.7821151165106552,\n \"acc_norm_stderr\": 0.004119650817714288\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.0479372485441102,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.48148148148148145,\n \"acc_stderr\": 0.043163785995113245,\n \"acc_norm\": 0.48148148148148145,\n \"acc_norm_stderr\": 0.043163785995113245\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6052631578947368,\n \"acc_stderr\": 0.039777499346220734,\n \"acc_norm\": 0.6052631578947368,\n \"acc_norm_stderr\": 0.039777499346220734\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.64,\n \"acc_stderr\": 0.048241815132442176,\n \"acc_norm\": 0.64,\n \"acc_norm_stderr\": 0.048241815132442176\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.5962264150943396,\n \"acc_stderr\": 0.030197611600197946,\n \"acc_norm\": 0.5962264150943396,\n \"acc_norm_stderr\": 0.030197611600197946\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.5625,\n \"acc_stderr\": 0.04148415739394154,\n \"acc_norm\": 0.5625,\n \"acc_norm_stderr\": 0.04148415739394154\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252605,\n 
\"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252605\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.56,\n \"acc_stderr\": 0.049888765156985884,\n \"acc_norm\": 0.56,\n \"acc_norm_stderr\": 0.049888765156985884\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.39,\n \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\": 0.39,\n \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5375722543352601,\n \"acc_stderr\": 0.0380168510452446,\n \"acc_norm\": 0.5375722543352601,\n \"acc_norm_stderr\": 0.0380168510452446\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.35294117647058826,\n \"acc_stderr\": 0.04755129616062948,\n \"acc_norm\": 0.35294117647058826,\n \"acc_norm_stderr\": 0.04755129616062948\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.79,\n \"acc_stderr\": 0.040936018074033256,\n \"acc_norm\": 0.79,\n \"acc_norm_stderr\": 0.040936018074033256\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5659574468085107,\n \"acc_stderr\": 0.03240038086792747,\n \"acc_norm\": 0.5659574468085107,\n \"acc_norm_stderr\": 0.03240038086792747\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.45614035087719296,\n \"acc_stderr\": 0.04685473041907789,\n \"acc_norm\": 0.45614035087719296,\n \"acc_norm_stderr\": 0.04685473041907789\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5241379310344828,\n \"acc_stderr\": 0.041618085035015295,\n \"acc_norm\": 0.5241379310344828,\n \"acc_norm_stderr\": 0.041618085035015295\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.4021164021164021,\n \"acc_stderr\": 0.025253032554997692,\n \"acc_norm\": 0.4021164021164021,\n \"acc_norm_stderr\": 0.025253032554997692\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.47619047619047616,\n \"acc_stderr\": 0.04467062628403273,\n \"acc_norm\": 0.47619047619047616,\n \"acc_norm_stderr\": 0.04467062628403273\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.37,\n \"acc_stderr\": 0.048523658709391,\n \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.048523658709391\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.667741935483871,\n \"acc_stderr\": 0.0267955608481228,\n \"acc_norm\": 0.667741935483871,\n \"acc_norm_stderr\": 0.0267955608481228\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.4236453201970443,\n \"acc_stderr\": 0.03476725747649037,\n \"acc_norm\": 0.4236453201970443,\n \"acc_norm_stderr\": 0.03476725747649037\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.73,\n \"acc_stderr\": 0.044619604333847394,\n \"acc_norm\": 0.73,\n \"acc_norm_stderr\": 0.044619604333847394\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7454545454545455,\n \"acc_stderr\": 0.03401506715249039,\n \"acc_norm\": 0.7454545454545455,\n \"acc_norm_stderr\": 0.03401506715249039\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7424242424242424,\n \"acc_stderr\": 0.03115626951964683,\n \"acc_norm\": 0.7424242424242424,\n \"acc_norm_stderr\": 0.03115626951964683\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8082901554404145,\n \"acc_stderr\": 0.02840895362624528,\n \"acc_norm\": 0.8082901554404145,\n \"acc_norm_stderr\": 0.02840895362624528\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 
0.5846153846153846,\n \"acc_stderr\": 0.024985354923102325,\n \"acc_norm\": 0.5846153846153846,\n \"acc_norm_stderr\": 0.024985354923102325\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.337037037037037,\n \"acc_stderr\": 0.028820884666253255,\n \"acc_norm\": 0.337037037037037,\n \"acc_norm_stderr\": 0.028820884666253255\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6092436974789915,\n \"acc_stderr\": 0.031693802357129965,\n \"acc_norm\": 0.6092436974789915,\n \"acc_norm_stderr\": 0.031693802357129965\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.41721854304635764,\n \"acc_stderr\": 0.04026141497634611,\n \"acc_norm\": 0.41721854304635764,\n \"acc_norm_stderr\": 0.04026141497634611\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.7596330275229358,\n \"acc_stderr\": 0.01832060732096407,\n \"acc_norm\": 0.7596330275229358,\n \"acc_norm_stderr\": 0.01832060732096407\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.47685185185185186,\n \"acc_stderr\": 0.034063153607115065,\n \"acc_norm\": 0.47685185185185186,\n \"acc_norm_stderr\": 0.034063153607115065\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7647058823529411,\n \"acc_stderr\": 0.029771775228145635,\n \"acc_norm\": 0.7647058823529411,\n \"acc_norm_stderr\": 0.029771775228145635\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7890295358649789,\n \"acc_stderr\": 0.02655837250266192,\n \"acc_norm\": 0.7890295358649789,\n \"acc_norm_stderr\": 0.02655837250266192\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6547085201793722,\n \"acc_stderr\": 0.03191100192835794,\n \"acc_norm\": 0.6547085201793722,\n \"acc_norm_stderr\": 0.03191100192835794\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7099236641221374,\n \"acc_stderr\": 0.03980066246467766,\n \"acc_norm\": 0.7099236641221374,\n \"acc_norm_stderr\": 0.03980066246467766\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.743801652892562,\n \"acc_stderr\": 0.03984979653302872,\n \"acc_norm\": 0.743801652892562,\n \"acc_norm_stderr\": 0.03984979653302872\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7222222222222222,\n \"acc_stderr\": 0.043300437496507395,\n \"acc_norm\": 0.7222222222222222,\n \"acc_norm_stderr\": 0.043300437496507395\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7239263803680982,\n \"acc_stderr\": 0.035123852837050475,\n \"acc_norm\": 0.7239263803680982,\n \"acc_norm_stderr\": 0.035123852837050475\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.04745789978762494,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.04745789978762494\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7572815533980582,\n \"acc_stderr\": 0.042450224863844956,\n \"acc_norm\": 0.7572815533980582,\n \"acc_norm_stderr\": 0.042450224863844956\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.811965811965812,\n \"acc_stderr\": 0.025598193686652254,\n \"acc_norm\": 0.811965811965812,\n \"acc_norm_stderr\": 0.025598193686652254\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.57,\n \"acc_stderr\": 0.049756985195624284,\n \"acc_norm\": 0.57,\n \"acc_norm_stderr\": 0.049756985195624284\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7407407407407407,\n \"acc_stderr\": 0.015671006009339572,\n \"acc_norm\": 0.7407407407407407,\n 
\"acc_norm_stderr\": 0.015671006009339572\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.6502890173410405,\n \"acc_stderr\": 0.025674281456531015,\n \"acc_norm\": 0.6502890173410405,\n \"acc_norm_stderr\": 0.025674281456531015\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4223463687150838,\n \"acc_stderr\": 0.016519594275297117,\n \"acc_norm\": 0.4223463687150838,\n \"acc_norm_stderr\": 0.016519594275297117\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.6241830065359477,\n \"acc_stderr\": 0.02773283435336393,\n \"acc_norm\": 0.6241830065359477,\n \"acc_norm_stderr\": 0.02773283435336393\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6816720257234726,\n \"acc_stderr\": 0.026457225067811025,\n \"acc_norm\": 0.6816720257234726,\n \"acc_norm_stderr\": 0.026457225067811025\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.6327160493827161,\n \"acc_stderr\": 0.026822801759507905,\n \"acc_norm\": 0.6327160493827161,\n \"acc_norm_stderr\": 0.026822801759507905\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.450354609929078,\n \"acc_stderr\": 0.029680105565029036,\n \"acc_norm\": 0.450354609929078,\n \"acc_norm_stderr\": 0.029680105565029036\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.41590612777053454,\n \"acc_stderr\": 0.01258832385031361,\n \"acc_norm\": 0.41590612777053454,\n \"acc_norm_stderr\": 0.01258832385031361\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.5220588235294118,\n \"acc_stderr\": 0.030343264224213514,\n \"acc_norm\": 0.5220588235294118,\n \"acc_norm_stderr\": 0.030343264224213514\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.5751633986928104,\n \"acc_stderr\": 0.019997973035458333,\n \"acc_norm\": 0.5751633986928104,\n \"acc_norm_stderr\": 0.019997973035458333\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6909090909090909,\n \"acc_stderr\": 0.044262946482000985,\n \"acc_norm\": 0.6909090909090909,\n \"acc_norm_stderr\": 0.044262946482000985\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.6979591836734694,\n \"acc_stderr\": 0.0293936093198798,\n \"acc_norm\": 0.6979591836734694,\n \"acc_norm_stderr\": 0.0293936093198798\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.7711442786069652,\n \"acc_stderr\": 0.029705284056772426,\n \"acc_norm\": 0.7711442786069652,\n \"acc_norm_stderr\": 0.029705284056772426\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.87,\n \"acc_stderr\": 0.0337997668989631,\n \"acc_norm\": 0.87,\n \"acc_norm_stderr\": 0.0337997668989631\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.46987951807228917,\n \"acc_stderr\": 0.03885425420866766,\n \"acc_norm\": 0.46987951807228917,\n \"acc_norm_stderr\": 0.03885425420866766\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.7543859649122807,\n \"acc_stderr\": 0.03301405946987249,\n \"acc_norm\": 0.7543859649122807,\n \"acc_norm_stderr\": 0.03301405946987249\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.2607099143206854,\n \"mc1_stderr\": 0.015368841620766373,\n \"mc2\": 0.39788477413004975,\n \"mc2_stderr\": 0.014288917719366868\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7521704814522494,\n \"acc_stderr\": 0.01213438601986535\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.4397270659590599,\n \"acc_stderr\": 0.013672052434471577\n }\n}\n```", "repo_url": "https://huggingface.co/codellama/CodeLlama-70b-hf", 
"leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_02T06_27_09.209983", "path": ["**/details_harness|arc:challenge|25_2024-02-02T06-27-09.209983.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-02T06-27-09.209983.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_02T06_27_09.209983", "path": ["**/details_harness|gsm8k|5_2024-02-02T06-27-09.209983.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-02T06-27-09.209983.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_02T06_27_09.209983", "path": ["**/details_harness|hellaswag|10_2024-02-02T06-27-09.209983.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-02T06-27-09.209983.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_02T06_27_09.209983", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T06-27-09.209983.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T06-27-09.209983.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T06-27-09.209983.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-02T06-27-09.209983.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T06-27-09.209983.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T06-27-09.209983.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_02T06_27_09.209983", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T06-27-09.209983.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T06-27-09.209983.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_02T06_27_09.209983", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T06-27-09.209983.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T06-27-09.209983.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_02T06_27_09.209983", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T06-27-09.209983.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T06-27-09.209983.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_02T06_27_09.209983", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T06-27-09.209983.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T06-27-09.209983.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_02T06_27_09.209983", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T06-27-09.209983.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T06-27-09.209983.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_02T06_27_09.209983", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T06-27-09.209983.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T06-27-09.209983.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_02T06_27_09.209983", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T06-27-09.209983.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T06-27-09.209983.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_02_02T06_27_09.209983", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T06-27-09.209983.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T06-27-09.209983.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_02T06_27_09.209983", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T06-27-09.209983.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T06-27-09.209983.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_02T06_27_09.209983", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T06-27-09.209983.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T06-27-09.209983.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_02T06_27_09.209983", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T06-27-09.209983.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T06-27-09.209983.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_02T06_27_09.209983", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T06-27-09.209983.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T06-27-09.209983.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_02T06_27_09.209983", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T06-27-09.209983.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T06-27-09.209983.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_02T06_27_09.209983", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T06-27-09.209983.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T06-27-09.209983.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_02T06_27_09.209983", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T06-27-09.209983.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T06-27-09.209983.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_02T06_27_09.209983", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T06-27-09.209983.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T06-27-09.209983.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_02T06_27_09.209983", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T06-27-09.209983.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T06-27-09.209983.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_02T06_27_09.209983", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T06-27-09.209983.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T06-27-09.209983.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_02T06_27_09.209983", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T06-27-09.209983.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T06-27-09.209983.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_02T06_27_09.209983", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T06-27-09.209983.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T06-27-09.209983.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_02T06_27_09.209983", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T06-27-09.209983.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T06-27-09.209983.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_02T06_27_09.209983", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T06-27-09.209983.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T06-27-09.209983.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_02T06_27_09.209983", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T06-27-09.209983.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T06-27-09.209983.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_02T06_27_09.209983", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T06-27-09.209983.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T06-27-09.209983.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_02T06_27_09.209983", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T06-27-09.209983.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T06-27-09.209983.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_02T06_27_09.209983", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T06-27-09.209983.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T06-27-09.209983.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_02T06_27_09.209983", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T06-27-09.209983.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T06-27-09.209983.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_02T06_27_09.209983", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T06-27-09.209983.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T06-27-09.209983.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_02T06_27_09.209983", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T06-27-09.209983.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T06-27-09.209983.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_02T06_27_09.209983", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T06-27-09.209983.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T06-27-09.209983.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_02T06_27_09.209983", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T06-27-09.209983.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T06-27-09.209983.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_02T06_27_09.209983", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T06-27-09.209983.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T06-27-09.209983.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_02T06_27_09.209983", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T06-27-09.209983.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T06-27-09.209983.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_02T06_27_09.209983", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T06-27-09.209983.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T06-27-09.209983.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_02T06_27_09.209983", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T06-27-09.209983.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T06-27-09.209983.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_02T06_27_09.209983", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T06-27-09.209983.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T06-27-09.209983.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_02T06_27_09.209983", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T06-27-09.209983.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T06-27-09.209983.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_02_02T06_27_09.209983", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T06-27-09.209983.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T06-27-09.209983.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_02T06_27_09.209983", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T06-27-09.209983.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T06-27-09.209983.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_02T06_27_09.209983", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T06-27-09.209983.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T06-27-09.209983.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_02T06_27_09.209983", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T06-27-09.209983.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T06-27-09.209983.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_02T06_27_09.209983", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T06-27-09.209983.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T06-27-09.209983.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_02T06_27_09.209983", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T06-27-09.209983.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T06-27-09.209983.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_02T06_27_09.209983", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T06-27-09.209983.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T06-27-09.209983.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_02T06_27_09.209983", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T06-27-09.209983.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T06-27-09.209983.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_02T06_27_09.209983", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T06-27-09.209983.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T06-27-09.209983.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_02T06_27_09.209983", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T06-27-09.209983.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T06-27-09.209983.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_02T06_27_09.209983", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T06-27-09.209983.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T06-27-09.209983.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_02T06_27_09.209983", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T06-27-09.209983.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T06-27-09.209983.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_02T06_27_09.209983", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T06-27-09.209983.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T06-27-09.209983.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_02T06_27_09.209983", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T06-27-09.209983.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T06-27-09.209983.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_02T06_27_09.209983", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T06-27-09.209983.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T06-27-09.209983.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_02T06_27_09.209983", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T06-27-09.209983.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T06-27-09.209983.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_02T06_27_09.209983", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T06-27-09.209983.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T06-27-09.209983.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_02T06_27_09.209983", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T06-27-09.209983.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T06-27-09.209983.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_02T06_27_09.209983", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T06-27-09.209983.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T06-27-09.209983.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_02T06_27_09.209983", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T06-27-09.209983.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T06-27-09.209983.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_02T06_27_09.209983", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T06-27-09.209983.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T06-27-09.209983.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_02T06_27_09.209983", "path": ["**/details_harness|winogrande|5_2024-02-02T06-27-09.209983.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-02-02T06-27-09.209983.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_02_02T06_27_09.209983", "path": ["results_2024-02-02T06-27-09.209983.parquet"]}, {"split": "latest", "path": 
["results_2024-02-02T06-27-09.209983.parquet"]}]}]}
2024-02-02T06:29:57+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of codellama/CodeLlama-70b-hf Dataset automatically created during the evaluation run of model codellama/CodeLlama-70b-hf on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-02-02T06:27:09.209983 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of codellama/CodeLlama-70b-hf\n\n\n\nDataset automatically created during the evaluation run of model codellama/CodeLlama-70b-hf on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T06:27:09.209983(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of codellama/CodeLlama-70b-hf\n\n\n\nDataset automatically created during the evaluation run of model codellama/CodeLlama-70b-hf on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T06:27:09.209983(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
d67138e705d963e346253a80e59676ddb418810a
# Dataset Summary

MLDR is a Multilingual Long-Document Retrieval dataset built on Wikipedia, Wudao and mC4, covering 13 typologically diverse languages. Specifically, we sample lengthy articles from the Wikipedia, Wudao and mC4 datasets and randomly choose paragraphs from them. Then we use GPT-3.5 to generate questions based on these paragraphs. The generated question and the sampled article constitute a new text pair that is added to the dataset. The prompt for GPT-3.5 is: “You are a curious AI assistant, please generate one specific and valuable question based on the following text. The generated question should revolve around the core content of this text, and avoid using pronouns (e.g., “this”). Note that you should generate only one question, without including additional content:”. The details of MLDR are shown in the following table.

| Language Code | Language | Source | #train | #dev | #test | #corpus | Avg. Length of Docs |
| :-----------: | :--------: | :--------------: | :-----: | :---: | :---: | :-----: | :-----------------: |
| ar | Arabic | Wikipedia | 1,817 | 200 | 200 | 7,607 | 9,428 |
| de | German | Wikipedia, mC4 | 1,847 | 200 | 200 | 10,000 | 9,039 |
| en | English | Wikipedia | 10,000 | 200 | 800 | 200,000 | 3,308 |
| es | Spanish | Wikipedia, mC4 | 2,254 | 200 | 200 | 9,551 | 8,771 |
| fr | French | Wikipedia | 1,608 | 200 | 200 | 10,000 | 9,659 |
| hi | Hindi | Wikipedia | 1,618 | 200 | 200 | 3,806 | 5,555 |
| it | Italian | Wikipedia | 2,151 | 200 | 200 | 10,000 | 9,195 |
| ja | Japanese | Wikipedia | 2,262 | 200 | 200 | 10,000 | 9,297 |
| ko | Korean | Wikipedia | 2,198 | 200 | 200 | 6,176 | 7,832 |
| pt | Portuguese | Wikipedia | 1,845 | 200 | 200 | 6,569 | 7,922 |
| ru | Russian | Wikipedia | 1,864 | 200 | 200 | 10,000 | 9,723 |
| th | Thai | mC4 | 1,970 | 200 | 200 | 10,000 | 8,089 |
| zh | Chinese | Wikipedia, Wudao | 10,000 | 200 | 800 | 200,000 | 4,249 |
| Total | - | - | 41,434 | 2,600 | 3,800 | 493,709 | 4,737 |

# Evaluation Pipeline

This task has been merged into [MTEB](https://github.com/embeddings-benchmark/mteb), so you can easily use the mteb tool to run the evaluation. We also provide some [scripts](https://github.com/FlagOpen/FlagEmbedding/tree/master/C_MTEB/MLDR) to test dense retrieval, sparse retrieval and ColBERT retrieval.
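As a minimal sketch of such an evaluation (assuming MLDR is exposed in MTEB under the task name `MultiLongDocRetrieval`, and using an arbitrary SentenceTransformer checkpoint as the dense retriever; both names are illustrative assumptions rather than prescriptions):

```python
# A minimal sketch, assuming the MLDR task is registered in MTEB as
# "MultiLongDocRetrieval" and that a SentenceTransformer model is a
# suitable dense retriever; swap in your own checkpoint as needed.
from mteb import MTEB
from sentence_transformers import SentenceTransformer

model = SentenceTransformer("intfloat/multilingual-e5-small")  # illustrative choice

evaluation = MTEB(tasks=["MultiLongDocRetrieval"])
evaluation.run(model, output_folder="results/mldr")
```

The scripts linked above additionally cover sparse and ColBERT-style retrieval.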
} ``` # Load Dataset An example to load the dataset: ```python # Avaliable languages: ['ar', 'de', 'en', 'es', 'fr', 'hi', 'it', 'ja', 'ko', 'pt', 'ru', 'th', 'zh'] language = 'zh' # to load all train, dev and test sets dataset = load_dataset('Shitao/MLDR', language) # or to load a specific split: split = 'train' dataset = load_dataset('Shitao/MLDR', language, split=split) # load corpus corpus = load_dataset('Shitao/MLDR', f'corpus-{language}', split='corpus') ``` # Citation Information ``` @misc{bge-m3, title={BGE M3-Embedding: Multi-Lingual, Multi-Functionality, Multi-Granularity Text Embeddings Through Self-Knowledge Distillation}, author={Jianlv Chen and Shitao Xiao and Peitian Zhang and Kun Luo and Defu Lian and Zheng Liu}, year={2024}, eprint={2402.03216}, archivePrefix={arXiv}, primaryClass={cs.CL} } ```
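As a supplement to the sections above, two short sketches. First, a minimal sketch of the question-generation step described in the summary. This is not the authors' actual pipeline; the model name and the use of the `openai` Python client are assumptions:

```python
from openai import OpenAI

client = OpenAI()

PROMPT = (
    "You are a curious AI assistant, please generate one specific and valuable "
    "question based on the following text. The generated question should revolve "
    "around the core content of this text, and avoid using pronouns (e.g., \"this\"). "
    "Note that you should generate only one question, without including additional content:"
)

def generate_question(paragraph: str) -> str:
    # One question per sampled paragraph, as described in the dataset summary.
    response = client.chat.completions.create(
        model="gpt-3.5-turbo",  # assumption: the card only says "GPT-3.5"
        messages=[{"role": "user", "content": f"{PROMPT}\n{paragraph}"}],
    )
    return response.choices[0].message.content.strip()
```

Second, a minimal sketch of running the evaluation through MTEB, as mentioned in the Evaluation Pipeline section. The embedding model is an arbitrary example, and `MultiLongDocRetrieval` is assumed to be the registered MTEB task name for MLDR:

```python
from mteb import MTEB
from sentence_transformers import SentenceTransformer

model = SentenceTransformer("BAAI/bge-m3")  # any MTEB-compatible embedding model works

evaluation = MTEB(tasks=["MultiLongDocRetrieval"])
evaluation.run(model, output_folder="results/mldr")
```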
Shitao/MLDR
[ "task_categories:text-retrieval", "multilinguality:multilingual", "language:ar", "language:de", "language:en", "language:es", "language:fr", "language:hi", "language:it", "language:ja", "language:ko", "language:pt", "language:ru", "language:th", "language:zh", "license:mit", "arxiv:2402.03216", "region:us" ]
2024-02-02T06:32:59+00:00
{"language": ["ar", "de", "en", "es", "fr", "hi", "it", "ja", "ko", "pt", "ru", "th", "zh"], "license": "mit", "multilinguality": ["multilingual"], "task_categories": ["text-retrieval"]}
2024-02-06T08:44:31+00:00
[ "2402.03216" ]
[ "ar", "de", "en", "es", "fr", "hi", "it", "ja", "ko", "pt", "ru", "th", "zh" ]
TAGS #task_categories-text-retrieval #multilinguality-multilingual #language-Arabic #language-German #language-English #language-Spanish #language-French #language-Hindi #language-Italian #language-Japanese #language-Korean #language-Portuguese #language-Russian #language-Thai #language-Chinese #license-mit #arxiv-2402.03216 #region-us
Dataset Summary
===============

MLDR is a Multilingual Long-Document Retrieval dataset built on Wikipedia, Wudao and mC4, covering 13 typologically diverse languages. Specifically, we sample lengthy articles from the Wikipedia, Wudao and mC4 datasets and randomly choose paragraphs from them. Then we use GPT-3.5 to generate questions based on these paragraphs. The generated question and the sampled article constitute a new text pair in the dataset. The prompt for GPT-3.5 is “You are a curious AI assistant, please generate one specific and valuable question based on the following text. The generated question should revolve around the core content of this text, and avoid using pronouns (e.g., ”this”). Note that you should generate only one question, without including additional content:”.

The details of MLDR are shown in the following table.

Evaluation Pipeline
===================

This task has been merged into MTEB, so you can easily use the mteb tool to run the evaluation. We also provide some scripts to test dense retrieval, sparse retrieval and ColBERT retrieval.

Dataset Structure
=================

The only configuration here is the 'language'. For each language, there are three splits: 'train', 'dev', and 'test'. The 'corpus' is also available here. An example from the 'train' set looks as follows:

An example from the 'dev' and 'test' sets looks as follows.

An example from the 'corpus' looks as follows.

Load Dataset
============

An example of loading the dataset:
[]
[ "TAGS\n#task_categories-text-retrieval #multilinguality-multilingual #language-Arabic #language-German #language-English #language-Spanish #language-French #language-Hindi #language-Italian #language-Japanese #language-Korean #language-Portuguese #language-Russian #language-Thai #language-Chinese #license-mit #arxiv-2402.03216 #region-us \n" ]
3d95b07eb2a4553fb5b8e84a2a70d6ea7eb65829
# Dataset Card for "cnndm_small" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
DualAuthor/cnndm_small
[ "region:us" ]
2024-02-02T06:35:29+00:00
{"dataset_info": {"features": [{"name": "article", "dtype": "string"}, {"name": "highlights", "dtype": "string"}, {"name": "id", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 430715, "num_examples": 100}], "download_size": 276841, "dataset_size": 430715}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2024-02-02T06:35:34+00:00
[]
[]
TAGS #region-us
# Dataset Card for "cnndm_small" More Information needed
[ "# Dataset Card for \"cnndm_small\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"cnndm_small\"\n\nMore Information needed" ]
e7a57a34c5d5ba0aded664d1bffb40396f31c4e2
# JaWiki

A text dataset extracted from Wikipedia's [HTML dump files](https://dumps.wikimedia.org/other/enterprise_html/).
Unlike text data extracted with Wikiextractor, it provides text free of unnecessary markup while preserving document structure such as paragraphs.

The dump file used is the one published on January 1, 2024.
Various additional data are also bundled to make the dataset easy to use for a range of NLP tasks.

For the preprocessing scripts, please see the [GitHub repository](https://github.com/hppRC/jawiki).

## Data Structure

Each record corresponds to one Wikipedia article.
The rough data structure and descriptions are shown below.

- id (int)
- title (str)
  - The article title.
- text (str)
  - The text of each paragraph (the `text` of `paragraphs`) joined with newlines.
- paragraphs (list[dict[str, int | str]])
  - The set of paragraphs in the article. Each paragraph is represented as a dict with the following structure.
  - paragraph_id (int)
    - A number indicating the paragraph's position in the article.
  - tag (str)
    - The name of the HTML tag that marked up the paragraph.
  - title (str | None)
    - The title of the section containing the paragraph.
    - May be absent.
  - text (str)
    - The body text of the paragraph.
- abstract (str | None)
  - The abstract of the article.
  - May be absent.
- wikitext (str)
  - The article body extracted via wikitext. It is kept alongside `text` for comparison and to help improve analysis accuracy.
- date_created (str)
  - The date the article was created.
- date_modified (str)
  - The date the article was last edited.
- is_disambiguation_page (bool)
  - Whether the page is a disambiguation page. Determined from the strings contained in `templates`.
- is_sexual_page (bool)
  - Whether the page contains sexual content. Determined from the strings contained in `templates`.
- is_violent_page (bool)
  - Whether the page contains violent content. Determined from the strings contained in `templates`.
- templates (list[str])
  - The list of templates used when creating the article.
- url (str)

This dataset was implemented with reference to [singletongue/wikipedia-utils](https://github.com/singletongue/wikipedia-utils).
We would like to take this opportunity to thank them.
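As an illustration, a minimal sketch of loading the dataset and filtering out disambiguation pages. The field names follow the structure above; streaming is used only to avoid the full download (roughly 12 GB) at once:

```python
from datasets import load_dataset

# Stream the single "train" split and keep only regular articles.
ds = load_dataset("hpprc/jawiki", split="train", streaming=True)
for article in ds:
    if article["is_disambiguation_page"]:
        continue
    # Paragraph-level access with the document structure preserved.
    for paragraph in article["paragraphs"]:
        print(paragraph["tag"], paragraph["text"][:40])
    break
```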
hpprc/jawiki
[ "language:ja", "license:cc-by-sa-3.0", "license:gfdl", "region:us" ]
2024-02-02T06:36:00+00:00
{"language": ["ja"], "license": ["cc-by-sa-3.0", "gfdl"], "pretty_name": "jawik", "dataset_info": {"features": [{"name": "id", "dtype": "int64"}, {"name": "title", "dtype": "string"}, {"name": "text", "dtype": "string"}, {"name": "paragraphs", "list": [{"name": "paragraph_id", "dtype": "int64"}, {"name": "tag", "dtype": "string"}, {"name": "text", "dtype": "string"}, {"name": "title", "dtype": "string"}]}, {"name": "abstract", "dtype": "string"}, {"name": "wikitext", "dtype": "string"}, {"name": "date_created", "dtype": "string"}, {"name": "date_modified", "dtype": "string"}, {"name": "is_disambiguation_page", "dtype": "bool"}, {"name": "is_sexual_page", "dtype": "bool"}, {"name": "is_violent_page", "dtype": "bool"}, {"name": "templates", "sequence": "string"}, {"name": "url", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 21992139146, "num_examples": 1399160}], "download_size": 11689147520, "dataset_size": 21992139146}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2024-02-13T15:19:49+00:00
[]
[ "ja" ]
TAGS #language-Japanese #license-cc-by-sa-3.0 #license-gfdl #region-us
# JaWiki

A text dataset extracted from Wikipedia's HTML dump files.
Unlike text data extracted with Wikiextractor, it provides text free of unnecessary markup while preserving document structure such as paragraphs.

The dump file used is the one published on January 1, 2024.
Various additional data are also bundled to make the dataset easy to use for a range of NLP tasks.

For the preprocessing scripts, please see the GitHub repository.

## Data Structure

Each record corresponds to one Wikipedia article.
The rough data structure and descriptions are shown below.

- id (int)
- title (str)
  - The article title.
- text (str)
  - The text of each paragraph (the 'text' of 'paragraphs') joined with newlines.
- paragraphs (list[dict[str, int | str]])
  - The set of paragraphs in the article. Each paragraph is represented as a dict with the following structure.
  - paragraph_id (int)
    - A number indicating the paragraph's position in the article.
  - tag (str)
    - The name of the HTML tag that marked up the paragraph.
  - title (str | None)
    - The title of the section containing the paragraph.
    - May be absent.
  - text (str)
    - The body text of the paragraph.
- abstract (str | None)
  - The abstract of the article.
  - May be absent.
- wikitext (str)
  - The article body extracted via wikitext. It is kept alongside 'text' for comparison and to help improve analysis accuracy.
- date_created (str)
  - The date the article was created.
- date_modified (str)
  - The date the article was last edited.
- is_disambiguation_page (bool)
  - Whether the page is a disambiguation page. Determined from the strings contained in 'templates'.
- is_sexual_page (bool)
  - Whether the page contains sexual content. Determined from the strings contained in 'templates'.
- is_violent_page (bool)
  - Whether the page contains violent content. Determined from the strings contained in 'templates'.
- templates (list[str])
  - The list of templates used when creating the article.
- url (str)

This dataset was implemented with reference to singletongue/wikipedia-utils.
We would like to take this opportunity to thank them.
[ "# JaWiki\n\nWikipediaのHTML形式のダンプファイルから抽出したテキストデータセットです。\nWikiextractorによって抽出したテキストデータと異なり、段落などの文書構造を維持したまま、不要なマークアップのないテキストが利用できます。\n\nダンプファイルは、2024年1月1日に公開されたものを利用しています。\nまた、各種NLPタスクに利用しやすいよう、様々なデータを同梱しています。\n\n\n各種前処理スクリプトはGitHubのリポジトリをご参照ください。", "## データ構造\n\n各レコードはWikipediaの記事一つに対応しています。\n大まかなデータ構造と説明を以下に示します。\n\n\n- id (int)\n- title (str)\n - 記事タイトルです。\n- text (str)\n - 各段落の文章('paragraphs'の'text')を改行で結合したテキストです。\n- paragraphs (list[dict[str, int | str]])\n - 記事中の段落の集合です。各段落は辞書型で表現されており、以下のデータ構造に基づきます。\n - paragraph_id (int)\n - 記事中で何番目の段落かを示す番号です。\n - tag (str)\n - 当該段落をマークアップしていたHTMLタグの名称です。\n - title (str | None)\n - 当該段落を含むセクションのタイトルです。\n - 存在しない場合もあります。\n - text (str)\n - 段落のテキスト本文です。\n- abstract (str | None)\n - 記事の要約です。\n - ない場合もあります。\n- wikitext (str)\n - wikitextによって抽出された記事本文です。比較・解析精度向上に資する目的で'text'と併存しています。\n- date_created (str)\n - 記事が作成された日付です。\n- date_modified (str)\n - 記事が最後に編集された日付です。\n- is_disambiguation_page (bool)\n - 曖昧さ回避のためのページかどうかを表す値です。'templates'に含まれる文字列から判別しています。\n- is_sexual_page (bool)\n - 性的な内容を含むページかどうかを表す値です。'templates'に含まれる文字列から判別しています。\n- is_violent_page (bool)\n - 暴力的な内容を含むページかどうかを表す値です。'templates'に含まれる文字列から判別しています。\n- templates (list[str])\n - 記事を作成する際に利用されたテンプレートのリストです。\n- url (str)\n\n\nデータセットの作成にあたり、singletongue/wikipedia-utilsを参考に実装を行いました。\nこの場を借りて感謝申し上げます。" ]
[ "TAGS\n#language-Japanese #license-cc-by-sa-3.0 #license-gfdl #region-us \n", "# JaWiki\n\nWikipediaのHTML形式のダンプファイルから抽出したテキストデータセットです。\nWikiextractorによって抽出したテキストデータと異なり、段落などの文書構造を維持したまま、不要なマークアップのないテキストが利用できます。\n\nダンプファイルは、2024年1月1日に公開されたものを利用しています。\nまた、各種NLPタスクに利用しやすいよう、様々なデータを同梱しています。\n\n\n各種前処理スクリプトはGitHubのリポジトリをご参照ください。", "## データ構造\n\n各レコードはWikipediaの記事一つに対応しています。\n大まかなデータ構造と説明を以下に示します。\n\n\n- id (int)\n- title (str)\n - 記事タイトルです。\n- text (str)\n - 各段落の文章('paragraphs'の'text')を改行で結合したテキストです。\n- paragraphs (list[dict[str, int | str]])\n - 記事中の段落の集合です。各段落は辞書型で表現されており、以下のデータ構造に基づきます。\n - paragraph_id (int)\n - 記事中で何番目の段落かを示す番号です。\n - tag (str)\n - 当該段落をマークアップしていたHTMLタグの名称です。\n - title (str | None)\n - 当該段落を含むセクションのタイトルです。\n - 存在しない場合もあります。\n - text (str)\n - 段落のテキスト本文です。\n- abstract (str | None)\n - 記事の要約です。\n - ない場合もあります。\n- wikitext (str)\n - wikitextによって抽出された記事本文です。比較・解析精度向上に資する目的で'text'と併存しています。\n- date_created (str)\n - 記事が作成された日付です。\n- date_modified (str)\n - 記事が最後に編集された日付です。\n- is_disambiguation_page (bool)\n - 曖昧さ回避のためのページかどうかを表す値です。'templates'に含まれる文字列から判別しています。\n- is_sexual_page (bool)\n - 性的な内容を含むページかどうかを表す値です。'templates'に含まれる文字列から判別しています。\n- is_violent_page (bool)\n - 暴力的な内容を含むページかどうかを表す値です。'templates'に含まれる文字列から判別しています。\n- templates (list[str])\n - 記事を作成する際に利用されたテンプレートのリストです。\n- url (str)\n\n\nデータセットの作成にあたり、singletongue/wikipedia-utilsを参考に実装を行いました。\nこの場を借りて感謝申し上げます。" ]
7c70878c0e2c79bb539fa7272e5ce6ffcc8cb3d4
# Dataset Card for "wmt16_small" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
DualAuthor/wmt16_small
[ "region:us" ]
2024-02-02T06:37:22+00:00
{"dataset_info": {"features": [{"name": "translation", "struct": [{"name": "de", "dtype": "string"}, {"name": "en", "dtype": "string"}]}], "splits": [{"name": "train", "num_bytes": 22584, "num_examples": 100}], "download_size": 19788, "dataset_size": 22584}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2024-02-02T06:37:28+00:00
[]
[]
TAGS #region-us
# Dataset Card for "wmt16_small" More Information needed
[ "# Dataset Card for \"wmt16_small\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"wmt16_small\"\n\nMore Information needed" ]
1f5a10445aca4cd07e5dc12dd12134f15e511eed
# Dataset Card for Evaluation run of Weyaxi/Qwen-72B-Llama <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [Weyaxi/Qwen-72B-Llama](https://huggingface.co/Weyaxi/Qwen-72B-Llama) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_Weyaxi__Qwen-72B-Llama", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-02-02T06:36:25.719099](https://huggingface.co/datasets/open-llm-leaderboard/details_Weyaxi__Qwen-72B-Llama/blob/main/results_2024-02-02T06-36-25.719099.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.7316507461013225, "acc_stderr": 0.02876333054548078, "acc_norm": 0.7367632357692079, "acc_norm_stderr": 0.029299960559549205, "mc1": 0.40514075887392903, "mc1_stderr": 0.01718561172775337, "mc2": 0.5759942995358094, "mc2_stderr": 0.015126059763108792 }, "harness|arc:challenge|25": { "acc": 0.6168941979522184, "acc_stderr": 0.014206472661672877, "acc_norm": 0.6484641638225256, "acc_norm_stderr": 0.013952413699600931 }, "harness|hellaswag|10": { "acc": 0.6480780720971918, "acc_stderr": 0.004765937515197188, "acc_norm": 0.8327026488747261, "acc_norm_stderr": 0.00372478338925333 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6962962962962963, "acc_stderr": 0.039725528847851355, "acc_norm": 0.6962962962962963, "acc_norm_stderr": 0.039725528847851355 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.875, "acc_stderr": 0.026913523521537846, "acc_norm": 0.875, "acc_norm_stderr": 0.026913523521537846 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.78, "acc_stderr": 0.04163331998932262, "acc_norm": 0.78, "acc_norm_stderr": 0.04163331998932262 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.8113207547169812, "acc_stderr": 0.02407999513006224, "acc_norm": 0.8113207547169812, "acc_norm_stderr": 0.02407999513006224 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.9027777777777778, "acc_stderr": 0.02477451625044018, "acc_norm": 0.9027777777777778, "acc_norm_stderr": 0.02477451625044018 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.54, "acc_stderr": 0.05009082659620332, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620332 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.64, "acc_stderr": 0.048241815132442176, "acc_norm": 0.64, "acc_norm_stderr": 0.048241815132442176 }, "harness|hendrycksTest-college_mathematics|5": { 
"acc": 0.43, "acc_stderr": 0.04975698519562428, "acc_norm": 0.43, "acc_norm_stderr": 0.04975698519562428 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.7745664739884393, "acc_stderr": 0.03186209851641143, "acc_norm": 0.7745664739884393, "acc_norm_stderr": 0.03186209851641143 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4411764705882353, "acc_stderr": 0.049406356306056595, "acc_norm": 0.4411764705882353, "acc_norm_stderr": 0.049406356306056595 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.84, "acc_stderr": 0.036845294917747094, "acc_norm": 0.84, "acc_norm_stderr": 0.036845294917747094 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.8170212765957446, "acc_stderr": 0.025276041000449966, "acc_norm": 0.8170212765957446, "acc_norm_stderr": 0.025276041000449966 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5614035087719298, "acc_stderr": 0.04668000738510455, "acc_norm": 0.5614035087719298, "acc_norm_stderr": 0.04668000738510455 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.8, "acc_stderr": 0.0333333333333333, "acc_norm": 0.8, "acc_norm_stderr": 0.0333333333333333 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.6851851851851852, "acc_stderr": 0.023919984164047732, "acc_norm": 0.6851851851851852, "acc_norm_stderr": 0.023919984164047732 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.4603174603174603, "acc_stderr": 0.04458029125470973, "acc_norm": 0.4603174603174603, "acc_norm_stderr": 0.04458029125470973 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.54, "acc_stderr": 0.05009082659620333, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620333 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.864516129032258, "acc_stderr": 0.019469334586486933, "acc_norm": 0.864516129032258, "acc_norm_stderr": 0.019469334586486933 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.6945812807881774, "acc_stderr": 0.03240661565868408, "acc_norm": 0.6945812807881774, "acc_norm_stderr": 0.03240661565868408 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.79, "acc_stderr": 0.040936018074033256, "acc_norm": 0.79, "acc_norm_stderr": 0.040936018074033256 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.22424242424242424, "acc_stderr": 0.032568666616811015, "acc_norm": 0.22424242424242424, "acc_norm_stderr": 0.032568666616811015 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.9141414141414141, "acc_stderr": 0.01996022556317289, "acc_norm": 0.9141414141414141, "acc_norm_stderr": 0.01996022556317289 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9844559585492227, "acc_stderr": 0.008927492715084317, "acc_norm": 0.9844559585492227, "acc_norm_stderr": 0.008927492715084317 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.7948717948717948, "acc_stderr": 0.02047323317355199, "acc_norm": 0.7948717948717948, "acc_norm_stderr": 0.02047323317355199 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.4444444444444444, "acc_stderr": 0.030296771286067323, "acc_norm": 0.4444444444444444, "acc_norm_stderr": 0.030296771286067323 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.8025210084033614, "acc_stderr": 0.025859164122051453, "acc_norm": 0.8025210084033614, "acc_norm_stderr": 0.025859164122051453 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.4966887417218543, "acc_stderr": 0.04082393379449654, "acc_norm": 0.4966887417218543, "acc_norm_stderr": 0.04082393379449654 
}, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.9211009174311927, "acc_stderr": 0.011558198113769553, "acc_norm": 0.9211009174311927, "acc_norm_stderr": 0.011558198113769553 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.6620370370370371, "acc_stderr": 0.03225941352631295, "acc_norm": 0.6620370370370371, "acc_norm_stderr": 0.03225941352631295 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.43137254901960786, "acc_stderr": 0.03476099060501636, "acc_norm": 0.43137254901960786, "acc_norm_stderr": 0.03476099060501636 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8396624472573839, "acc_stderr": 0.02388438092596567, "acc_norm": 0.8396624472573839, "acc_norm_stderr": 0.02388438092596567 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.7982062780269058, "acc_stderr": 0.02693611191280227, "acc_norm": 0.7982062780269058, "acc_norm_stderr": 0.02693611191280227 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.8854961832061069, "acc_stderr": 0.027927473753597453, "acc_norm": 0.8854961832061069, "acc_norm_stderr": 0.027927473753597453 }, "harness|hendrycksTest-international_law|5": { "acc": 0.859504132231405, "acc_stderr": 0.03172233426002158, "acc_norm": 0.859504132231405, "acc_norm_stderr": 0.03172233426002158 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8055555555555556, "acc_stderr": 0.03826076324884866, "acc_norm": 0.8055555555555556, "acc_norm_stderr": 0.03826076324884866 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.8404907975460123, "acc_stderr": 0.02876748172598386, "acc_norm": 0.8404907975460123, "acc_norm_stderr": 0.02876748172598386 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.5892857142857143, "acc_stderr": 0.04669510663875191, "acc_norm": 0.5892857142857143, "acc_norm_stderr": 0.04669510663875191 }, "harness|hendrycksTest-management|5": { "acc": 0.8932038834951457, "acc_stderr": 0.030581088928331352, "acc_norm": 0.8932038834951457, "acc_norm_stderr": 0.030581088928331352 }, "harness|hendrycksTest-marketing|5": { "acc": 0.9273504273504274, "acc_stderr": 0.01700436856813234, "acc_norm": 0.9273504273504274, "acc_norm_stderr": 0.01700436856813234 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.84, "acc_stderr": 0.0368452949177471, "acc_norm": 0.84, "acc_norm_stderr": 0.0368452949177471 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.9169859514687101, "acc_stderr": 0.009866287394639552, "acc_norm": 0.9169859514687101, "acc_norm_stderr": 0.009866287394639552 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.8208092485549133, "acc_stderr": 0.02064759002967933, "acc_norm": 0.8208092485549133, "acc_norm_stderr": 0.02064759002967933 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.7832402234636872, "acc_stderr": 0.013780598486443356, "acc_norm": 0.7832402234636872, "acc_norm_stderr": 0.013780598486443356 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.8464052287581699, "acc_stderr": 0.020645597910418763, "acc_norm": 0.8464052287581699, "acc_norm_stderr": 0.020645597910418763 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.8360128617363344, "acc_stderr": 0.021029576464662695, "acc_norm": 0.8360128617363344, "acc_norm_stderr": 0.021029576464662695 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.8641975308641975, "acc_stderr": 0.01906158818150541, "acc_norm": 0.8641975308641975, "acc_norm_stderr": 0.01906158818150541 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.6028368794326241, "acc_stderr": 0.02918980567358709, "acc_norm": 
0.6028368794326241, "acc_norm_stderr": 0.02918980567358709 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.5827900912646675, "acc_stderr": 0.012593959992906426, "acc_norm": 0.5827900912646675, "acc_norm_stderr": 0.012593959992906426 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.8161764705882353, "acc_stderr": 0.023529242185193106, "acc_norm": 0.8161764705882353, "acc_norm_stderr": 0.023529242185193106 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.7990196078431373, "acc_stderr": 0.01621193888965559, "acc_norm": 0.7990196078431373, "acc_norm_stderr": 0.01621193888965559 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.7818181818181819, "acc_stderr": 0.03955932861795833, "acc_norm": 0.7818181818181819, "acc_norm_stderr": 0.03955932861795833 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7836734693877551, "acc_stderr": 0.026358916334904017, "acc_norm": 0.7836734693877551, "acc_norm_stderr": 0.026358916334904017 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8756218905472637, "acc_stderr": 0.023335401790166323, "acc_norm": 0.8756218905472637, "acc_norm_stderr": 0.023335401790166323 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.97, "acc_stderr": 0.01714466079977655, "acc_norm": 0.97, "acc_norm_stderr": 0.01714466079977655 }, "harness|hendrycksTest-virology|5": { "acc": 0.572289156626506, "acc_stderr": 0.038515976837185335, "acc_norm": 0.572289156626506, "acc_norm_stderr": 0.038515976837185335 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8771929824561403, "acc_stderr": 0.02517298435015577, "acc_norm": 0.8771929824561403, "acc_norm_stderr": 0.02517298435015577 }, "harness|truthfulqa:mc|0": { "mc1": 0.40514075887392903, "mc1_stderr": 0.01718561172775337, "mc2": 0.5759942995358094, "mc2_stderr": 0.015126059763108792 }, "harness|winogrande|5": { "acc": 0.8153117600631413, "acc_stderr": 0.010905978112156885 }, "harness|gsm8k|5": { "acc": 0.5625473843821076, "acc_stderr": 0.013664299060751915 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
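Beyond the `harness_winogrande_5` example above, any other task's per-sample details can be loaded the same way; a small sketch, with the config and split names taken from this repository's configuration list:

```python
from datasets import load_dataset

# "latest" always points at the most recent evaluation run.
details = load_dataset(
    "open-llm-leaderboard/details_Weyaxi__Qwen-72B-Llama",
    "harness_gsm8k_5",
    split="latest",
)
print(details)
```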
open-llm-leaderboard/details_Weyaxi__Qwen-72B-Llama
[ "region:us" ]
2024-02-02T06:38:37+00:00
{"pretty_name": "Evaluation run of Weyaxi/Qwen-72B-Llama", "dataset_summary": "Dataset automatically created during the evaluation run of model [Weyaxi/Qwen-72B-Llama](https://huggingface.co/Weyaxi/Qwen-72B-Llama) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Weyaxi__Qwen-72B-Llama\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-02T06:36:25.719099](https://huggingface.co/datasets/open-llm-leaderboard/details_Weyaxi__Qwen-72B-Llama/blob/main/results_2024-02-02T06-36-25.719099.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.7316507461013225,\n \"acc_stderr\": 0.02876333054548078,\n \"acc_norm\": 0.7367632357692079,\n \"acc_norm_stderr\": 0.029299960559549205,\n \"mc1\": 0.40514075887392903,\n \"mc1_stderr\": 0.01718561172775337,\n \"mc2\": 0.5759942995358094,\n \"mc2_stderr\": 0.015126059763108792\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6168941979522184,\n \"acc_stderr\": 0.014206472661672877,\n \"acc_norm\": 0.6484641638225256,\n \"acc_norm_stderr\": 0.013952413699600931\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6480780720971918,\n \"acc_stderr\": 0.004765937515197188,\n \"acc_norm\": 0.8327026488747261,\n \"acc_norm_stderr\": 0.00372478338925333\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.36,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6962962962962963,\n \"acc_stderr\": 0.039725528847851355,\n \"acc_norm\": 0.6962962962962963,\n \"acc_norm_stderr\": 0.039725528847851355\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.875,\n \"acc_stderr\": 0.026913523521537846,\n \"acc_norm\": 0.875,\n \"acc_norm_stderr\": 0.026913523521537846\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.78,\n \"acc_stderr\": 0.04163331998932262,\n \"acc_norm\": 0.78,\n \"acc_norm_stderr\": 0.04163331998932262\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.8113207547169812,\n \"acc_stderr\": 0.02407999513006224,\n \"acc_norm\": 0.8113207547169812,\n \"acc_norm_stderr\": 0.02407999513006224\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.9027777777777778,\n \"acc_stderr\": 0.02477451625044018,\n \"acc_norm\": 0.9027777777777778,\n \"acc_norm_stderr\": 0.02477451625044018\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.54,\n \"acc_stderr\": 0.05009082659620332,\n \"acc_norm\": 0.54,\n 
\"acc_norm_stderr\": 0.05009082659620332\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.64,\n \"acc_stderr\": 0.048241815132442176,\n \"acc_norm\": 0.64,\n \"acc_norm_stderr\": 0.048241815132442176\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.43,\n \"acc_stderr\": 0.04975698519562428,\n \"acc_norm\": 0.43,\n \"acc_norm_stderr\": 0.04975698519562428\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.7745664739884393,\n \"acc_stderr\": 0.03186209851641143,\n \"acc_norm\": 0.7745664739884393,\n \"acc_norm_stderr\": 0.03186209851641143\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4411764705882353,\n \"acc_stderr\": 0.049406356306056595,\n \"acc_norm\": 0.4411764705882353,\n \"acc_norm_stderr\": 0.049406356306056595\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.84,\n \"acc_stderr\": 0.036845294917747094,\n \"acc_norm\": 0.84,\n \"acc_norm_stderr\": 0.036845294917747094\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.8170212765957446,\n \"acc_stderr\": 0.025276041000449966,\n \"acc_norm\": 0.8170212765957446,\n \"acc_norm_stderr\": 0.025276041000449966\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5614035087719298,\n \"acc_stderr\": 0.04668000738510455,\n \"acc_norm\": 0.5614035087719298,\n \"acc_norm_stderr\": 0.04668000738510455\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.8,\n \"acc_stderr\": 0.0333333333333333,\n \"acc_norm\": 0.8,\n \"acc_norm_stderr\": 0.0333333333333333\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.6851851851851852,\n \"acc_stderr\": 0.023919984164047732,\n \"acc_norm\": 0.6851851851851852,\n \"acc_norm_stderr\": 0.023919984164047732\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4603174603174603,\n \"acc_stderr\": 0.04458029125470973,\n \"acc_norm\": 0.4603174603174603,\n \"acc_norm_stderr\": 0.04458029125470973\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.54,\n \"acc_stderr\": 0.05009082659620333,\n \"acc_norm\": 0.54,\n \"acc_norm_stderr\": 0.05009082659620333\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.864516129032258,\n \"acc_stderr\": 0.019469334586486933,\n \"acc_norm\": 0.864516129032258,\n \"acc_norm_stderr\": 0.019469334586486933\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.6945812807881774,\n \"acc_stderr\": 0.03240661565868408,\n \"acc_norm\": 0.6945812807881774,\n \"acc_norm_stderr\": 0.03240661565868408\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.79,\n \"acc_stderr\": 0.040936018074033256,\n \"acc_norm\": 0.79,\n \"acc_norm_stderr\": 0.040936018074033256\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.22424242424242424,\n \"acc_stderr\": 0.032568666616811015,\n \"acc_norm\": 0.22424242424242424,\n \"acc_norm_stderr\": 0.032568666616811015\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.9141414141414141,\n \"acc_stderr\": 0.01996022556317289,\n \"acc_norm\": 0.9141414141414141,\n \"acc_norm_stderr\": 0.01996022556317289\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9844559585492227,\n \"acc_stderr\": 0.008927492715084317,\n \"acc_norm\": 0.9844559585492227,\n \"acc_norm_stderr\": 0.008927492715084317\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.7948717948717948,\n \"acc_stderr\": 
0.02047323317355199,\n \"acc_norm\": 0.7948717948717948,\n \"acc_norm_stderr\": 0.02047323317355199\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.4444444444444444,\n \"acc_stderr\": 0.030296771286067323,\n \"acc_norm\": 0.4444444444444444,\n \"acc_norm_stderr\": 0.030296771286067323\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.8025210084033614,\n \"acc_stderr\": 0.025859164122051453,\n \"acc_norm\": 0.8025210084033614,\n \"acc_norm_stderr\": 0.025859164122051453\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.4966887417218543,\n \"acc_stderr\": 0.04082393379449654,\n \"acc_norm\": 0.4966887417218543,\n \"acc_norm_stderr\": 0.04082393379449654\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.9211009174311927,\n \"acc_stderr\": 0.011558198113769553,\n \"acc_norm\": 0.9211009174311927,\n \"acc_norm_stderr\": 0.011558198113769553\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.6620370370370371,\n \"acc_stderr\": 0.03225941352631295,\n \"acc_norm\": 0.6620370370370371,\n \"acc_norm_stderr\": 0.03225941352631295\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.43137254901960786,\n \"acc_stderr\": 0.03476099060501636,\n \"acc_norm\": 0.43137254901960786,\n \"acc_norm_stderr\": 0.03476099060501636\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8396624472573839,\n \"acc_stderr\": 0.02388438092596567,\n \"acc_norm\": 0.8396624472573839,\n \"acc_norm_stderr\": 0.02388438092596567\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.7982062780269058,\n \"acc_stderr\": 0.02693611191280227,\n \"acc_norm\": 0.7982062780269058,\n \"acc_norm_stderr\": 0.02693611191280227\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.8854961832061069,\n \"acc_stderr\": 0.027927473753597453,\n \"acc_norm\": 0.8854961832061069,\n \"acc_norm_stderr\": 0.027927473753597453\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.859504132231405,\n \"acc_stderr\": 0.03172233426002158,\n \"acc_norm\": 0.859504132231405,\n \"acc_norm_stderr\": 0.03172233426002158\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8055555555555556,\n \"acc_stderr\": 0.03826076324884866,\n \"acc_norm\": 0.8055555555555556,\n \"acc_norm_stderr\": 0.03826076324884866\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.8404907975460123,\n \"acc_stderr\": 0.02876748172598386,\n \"acc_norm\": 0.8404907975460123,\n \"acc_norm_stderr\": 0.02876748172598386\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.5892857142857143,\n \"acc_stderr\": 0.04669510663875191,\n \"acc_norm\": 0.5892857142857143,\n \"acc_norm_stderr\": 0.04669510663875191\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8932038834951457,\n \"acc_stderr\": 0.030581088928331352,\n \"acc_norm\": 0.8932038834951457,\n \"acc_norm_stderr\": 0.030581088928331352\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.9273504273504274,\n \"acc_stderr\": 0.01700436856813234,\n \"acc_norm\": 0.9273504273504274,\n \"acc_norm_stderr\": 0.01700436856813234\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.84,\n \"acc_stderr\": 0.0368452949177471,\n \"acc_norm\": 0.84,\n \"acc_norm_stderr\": 0.0368452949177471\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.9169859514687101,\n \"acc_stderr\": 0.009866287394639552,\n \"acc_norm\": 0.9169859514687101,\n \"acc_norm_stderr\": 
0.009866287394639552\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.8208092485549133,\n \"acc_stderr\": 0.02064759002967933,\n \"acc_norm\": 0.8208092485549133,\n \"acc_norm_stderr\": 0.02064759002967933\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.7832402234636872,\n \"acc_stderr\": 0.013780598486443356,\n \"acc_norm\": 0.7832402234636872,\n \"acc_norm_stderr\": 0.013780598486443356\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.8464052287581699,\n \"acc_stderr\": 0.020645597910418763,\n \"acc_norm\": 0.8464052287581699,\n \"acc_norm_stderr\": 0.020645597910418763\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.8360128617363344,\n \"acc_stderr\": 0.021029576464662695,\n \"acc_norm\": 0.8360128617363344,\n \"acc_norm_stderr\": 0.021029576464662695\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.8641975308641975,\n \"acc_stderr\": 0.01906158818150541,\n \"acc_norm\": 0.8641975308641975,\n \"acc_norm_stderr\": 0.01906158818150541\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.6028368794326241,\n \"acc_stderr\": 0.02918980567358709,\n \"acc_norm\": 0.6028368794326241,\n \"acc_norm_stderr\": 0.02918980567358709\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.5827900912646675,\n \"acc_stderr\": 0.012593959992906426,\n \"acc_norm\": 0.5827900912646675,\n \"acc_norm_stderr\": 0.012593959992906426\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.8161764705882353,\n \"acc_stderr\": 0.023529242185193106,\n \"acc_norm\": 0.8161764705882353,\n \"acc_norm_stderr\": 0.023529242185193106\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.7990196078431373,\n \"acc_stderr\": 0.01621193888965559,\n \"acc_norm\": 0.7990196078431373,\n \"acc_norm_stderr\": 0.01621193888965559\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.7818181818181819,\n \"acc_stderr\": 0.03955932861795833,\n \"acc_norm\": 0.7818181818181819,\n \"acc_norm_stderr\": 0.03955932861795833\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7836734693877551,\n \"acc_stderr\": 0.026358916334904017,\n \"acc_norm\": 0.7836734693877551,\n \"acc_norm_stderr\": 0.026358916334904017\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8756218905472637,\n \"acc_stderr\": 0.023335401790166323,\n \"acc_norm\": 0.8756218905472637,\n \"acc_norm_stderr\": 0.023335401790166323\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.97,\n \"acc_stderr\": 0.01714466079977655,\n \"acc_norm\": 0.97,\n \"acc_norm_stderr\": 0.01714466079977655\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.572289156626506,\n \"acc_stderr\": 0.038515976837185335,\n \"acc_norm\": 0.572289156626506,\n \"acc_norm_stderr\": 0.038515976837185335\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8771929824561403,\n \"acc_stderr\": 0.02517298435015577,\n \"acc_norm\": 0.8771929824561403,\n \"acc_norm_stderr\": 0.02517298435015577\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.40514075887392903,\n \"mc1_stderr\": 0.01718561172775337,\n \"mc2\": 0.5759942995358094,\n \"mc2_stderr\": 0.015126059763108792\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8153117600631413,\n \"acc_stderr\": 0.010905978112156885\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.5625473843821076,\n \"acc_stderr\": 0.013664299060751915\n }\n}\n```", "repo_url": "https://huggingface.co/Weyaxi/Qwen-72B-Llama", "leaderboard_url": 
"https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_02T06_36_25.719099", "path": ["**/details_harness|arc:challenge|25_2024-02-02T06-36-25.719099.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-02T06-36-25.719099.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_02T06_36_25.719099", "path": ["**/details_harness|gsm8k|5_2024-02-02T06-36-25.719099.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-02T06-36-25.719099.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_02T06_36_25.719099", "path": ["**/details_harness|hellaswag|10_2024-02-02T06-36-25.719099.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-02T06-36-25.719099.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_02T06_36_25.719099", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T06-36-25.719099.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T06-36-25.719099.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T06-36-25.719099.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-02T06-36-25.719099.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T06-36-25.719099.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T06-36-25.719099.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_02T06_36_25.719099", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T06-36-25.719099.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T06-36-25.719099.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_02T06_36_25.719099", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T06-36-25.719099.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T06-36-25.719099.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_02T06_36_25.719099", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T06-36-25.719099.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T06-36-25.719099.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_02T06_36_25.719099", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T06-36-25.719099.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T06-36-25.719099.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_02T06_36_25.719099", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T06-36-25.719099.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T06-36-25.719099.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_02T06_36_25.719099", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T06-36-25.719099.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T06-36-25.719099.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_02T06_36_25.719099", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T06-36-25.719099.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T06-36-25.719099.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_02_02T06_36_25.719099", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T06-36-25.719099.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T06-36-25.719099.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_02T06_36_25.719099", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T06-36-25.719099.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T06-36-25.719099.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_02T06_36_25.719099", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T06-36-25.719099.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T06-36-25.719099.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_02T06_36_25.719099", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T06-36-25.719099.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T06-36-25.719099.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_02T06_36_25.719099", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T06-36-25.719099.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T06-36-25.719099.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_02T06_36_25.719099", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T06-36-25.719099.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T06-36-25.719099.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_02T06_36_25.719099", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T06-36-25.719099.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T06-36-25.719099.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_02T06_36_25.719099", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T06-36-25.719099.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T06-36-25.719099.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_02T06_36_25.719099", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T06-36-25.719099.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T06-36-25.719099.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_02T06_36_25.719099", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T06-36-25.719099.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T06-36-25.719099.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_02T06_36_25.719099", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T06-36-25.719099.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T06-36-25.719099.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_02T06_36_25.719099", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T06-36-25.719099.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T06-36-25.719099.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_02T06_36_25.719099", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T06-36-25.719099.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T06-36-25.719099.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_02T06_36_25.719099", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T06-36-25.719099.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T06-36-25.719099.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_02T06_36_25.719099", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T06-36-25.719099.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T06-36-25.719099.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_02T06_36_25.719099", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T06-36-25.719099.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T06-36-25.719099.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_02T06_36_25.719099", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T06-36-25.719099.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T06-36-25.719099.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_02T06_36_25.719099", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T06-36-25.719099.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T06-36-25.719099.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_02T06_36_25.719099", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T06-36-25.719099.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T06-36-25.719099.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_02T06_36_25.719099", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T06-36-25.719099.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T06-36-25.719099.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_02T06_36_25.719099", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T06-36-25.719099.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T06-36-25.719099.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_02T06_36_25.719099", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T06-36-25.719099.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T06-36-25.719099.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_02T06_36_25.719099", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T06-36-25.719099.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T06-36-25.719099.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_02T06_36_25.719099", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T06-36-25.719099.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T06-36-25.719099.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_02T06_36_25.719099", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T06-36-25.719099.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T06-36-25.719099.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_02T06_36_25.719099", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T06-36-25.719099.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T06-36-25.719099.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_02T06_36_25.719099", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T06-36-25.719099.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T06-36-25.719099.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_02T06_36_25.719099", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T06-36-25.719099.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T06-36-25.719099.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_02T06_36_25.719099", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T06-36-25.719099.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T06-36-25.719099.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_02T06_36_25.719099", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T06-36-25.719099.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T06-36-25.719099.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_02_02T06_36_25.719099", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T06-36-25.719099.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T06-36-25.719099.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_02T06_36_25.719099", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T06-36-25.719099.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T06-36-25.719099.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_02T06_36_25.719099", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T06-36-25.719099.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T06-36-25.719099.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_02T06_36_25.719099", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T06-36-25.719099.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T06-36-25.719099.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_02T06_36_25.719099", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T06-36-25.719099.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T06-36-25.719099.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_02T06_36_25.719099", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T06-36-25.719099.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T06-36-25.719099.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_02T06_36_25.719099", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T06-36-25.719099.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T06-36-25.719099.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_02T06_36_25.719099", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T06-36-25.719099.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T06-36-25.719099.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_02T06_36_25.719099", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T06-36-25.719099.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T06-36-25.719099.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_02T06_36_25.719099", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T06-36-25.719099.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T06-36-25.719099.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_02T06_36_25.719099", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T06-36-25.719099.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T06-36-25.719099.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_02T06_36_25.719099", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T06-36-25.719099.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T06-36-25.719099.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_02T06_36_25.719099", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T06-36-25.719099.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T06-36-25.719099.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_02T06_36_25.719099", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T06-36-25.719099.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T06-36-25.719099.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_02T06_36_25.719099", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T06-36-25.719099.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T06-36-25.719099.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_02T06_36_25.719099", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T06-36-25.719099.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T06-36-25.719099.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_02T06_36_25.719099", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T06-36-25.719099.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T06-36-25.719099.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_02T06_36_25.719099", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T06-36-25.719099.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T06-36-25.719099.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_02T06_36_25.719099", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T06-36-25.719099.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T06-36-25.719099.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_02T06_36_25.719099", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T06-36-25.719099.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T06-36-25.719099.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_02T06_36_25.719099", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T06-36-25.719099.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T06-36-25.719099.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_02T06_36_25.719099", "path": ["**/details_harness|winogrande|5_2024-02-02T06-36-25.719099.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-02-02T06-36-25.719099.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_02_02T06_36_25.719099", "path": ["results_2024-02-02T06-36-25.719099.parquet"]}, {"split": "latest", "path": 
["results_2024-02-02T06-36-25.719099.parquet"]}]}]}
2024-02-02T06:39:02+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of Weyaxi/Qwen-72B-Llama Dataset automatically created during the evaluation run of model Weyaxi/Qwen-72B-Llama on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-02-02T06:36:25.719099 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
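The loading snippet referenced above ("you can for instance do the following:") was stripped from this text rendering; below is a minimal sketch following the leaderboard's usual pattern. The repository name `open-llm-leaderboard/details_Weyaxi__Qwen-72B-Llama` is inferred from that naming convention rather than taken from this record, so treat it as an assumption; the config name `harness_winogrande_5` does appear in the config list above.

```python
from datasets import load_dataset

# Repo id is a hypothetical example inferred from the leaderboard's
# "details_<org>__<model>" naming convention.
data = load_dataset("open-llm-leaderboard/details_Weyaxi__Qwen-72B-Llama",
                    "harness_winogrande_5",
                    split="train")
```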
[ "# Dataset Card for Evaluation run of Weyaxi/Qwen-72B-Llama\n\n\n\nDataset automatically created during the evaluation run of model Weyaxi/Qwen-72B-Llama on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T06:36:25.719099(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of Weyaxi/Qwen-72B-Llama\n\n\n\nDataset automatically created during the evaluation run of model Weyaxi/Qwen-72B-Llama on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T06:36:25.719099(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
46ddacf3327d88de075f9951df264d2c7d408853
# Dataset Card for "gsm8k_small" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
DualAuthor/gsm8k_small
[ "region:us" ]
2024-02-02T06:39:12+00:00
{"dataset_info": {"features": [{"name": "question", "dtype": "string"}, {"name": "answer", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 53785, "num_examples": 100}], "download_size": 36085, "dataset_size": 53785}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2024-02-02T06:39:17+00:00
[]
[]
TAGS #region-us
# Dataset Card for "gsm8k_small" More Information needed
[ "# Dataset Card for \"gsm8k_small\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"gsm8k_small\"\n\nMore Information needed" ]
ba3f342379b8f14185386a65c17cdd7a67e39951
### Licensing Information The dataset is released under the terms of [ODC-BY](https://opendatacommons.org/licenses/by/1-0/). By using this, you are also bound to the respective Terms of Use and License of the original source. ### Citation Information Schwenk et al, CCMatrix: Mining Billions of High-Quality Parallel Sentences on the Web. ACL, https://aclanthology.org/2021.acl-long.507/, 2021. Heffernan et al, Bitext Mining Using Distilled Sentence Representations for Low-Resource Languages. arXiv, https://arxiv.org/abs/2205.12654, 2022. NLLB Team et al, No Language Left Behind: Scaling Human-Centered Machine Translation. arXiv, https://arxiv.org/abs/2207.04672, 2022. ### Contributions We thank the NLLB Meta AI team for open-sourcing the metadata and instructions on how to use it, with special thanks to Bapi Akula, Pierre Andrews, Onur Çelebi, Sergey Edunov, Kenneth Heafield, Philipp Koehn, Alex Mourachko, Safiyyah Saleem, Holger Schwenk, and Guillaume Wenzek. We also thank the AllenNLP team at AI2 for hosting and releasing this data, including Akshita Bhagia (for engineering efforts to host the data and create the Hugging Face dataset) and Jesse Dodge (for organizing the connection).
NLPC-UOM/nllb-top25k-ensi-cleaned
[ "task_categories:translation", "size_categories:10K<n<100K", "language:en", "language:si", "license:odc-by", "arxiv:2205.12654", "arxiv:2207.04672", "region:us" ]
2024-02-02T06:50:03+00:00
{"language": ["en", "si"], "license": "odc-by", "size_categories": ["10K<n<100K"], "task_categories": ["translation"]}
2024-02-15T06:51:00+00:00
[ "2205.12654", "2207.04672" ]
[ "en", "si" ]
TAGS #task_categories-translation #size_categories-10K<n<100K #language-English #language-Sinhala #license-odc-by #arxiv-2205.12654 #arxiv-2207.04672 #region-us
### Licensing Information The dataset is released under the terms of ODC-BY. By using this, you are also bound to the respective Terms of Use and License of the original source. ### Citation Information Schwenk et al, CCMatrix: Mining Billions of High-Quality Parallel Sentences on the Web. ACL, URL, 2021. Heffernan et al, Bitext Mining Using Distilled Sentence Representations for Low-Resource Languages. arXiv, URL, 2022. NLLB Team et al, No Language Left Behind: Scaling Human-Centered Machine Translation. arXiv, URL, 2022. ### Contributions We thank the NLLB Meta AI team for open-sourcing the metadata and instructions on how to use it, with special thanks to Bapi Akula, Pierre Andrews, Onur Çelebi, Sergey Edunov, Kenneth Heafield, Philipp Koehn, Alex Mourachko, Safiyyah Saleem, Holger Schwenk, and Guillaume Wenzek. We also thank the AllenNLP team at AI2 for hosting and releasing this data, including Akshita Bhagia (for engineering efforts to host the data and create the Hugging Face dataset) and Jesse Dodge (for organizing the connection).
[ "### Licensing Information\n\nThe dataset is released under the terms of ODC-BY. By using this, you are also bound to the respective Terms of Use and License of the original source.\n\n\n\n\nSchwenk et al, CCMatrix: Mining Billions of High-Quality Parallel Sentences on the Web. ACL URL\nHefferman et al, Bitext Mining Using Distilled Sentence Representations for Low-Resource Languages. Arxiv URL 2022.<br>\nNLLB Team et al, No Language Left Behind: Scaling Human-Centered Machine Translation, Arxiv URL 2022.", "### Contributions\n\nWe thank the NLLB Meta AI team for open sourcing the meta data and instructions on how to use it with special thanks to Bapi Akula, Pierre Andrews, Onur Çelebi, Sergey Edunov, Kenneth Heafield, Philipp Koehn, Alex Mourachko, Safiyyah Saleem, Holger Schwenk, and Guillaume Wenzek. We also thank the AllenNLP team at AI2 for hosting and releasing this data, including Akshita Bhagia (for engineering efforts to host the data, and create the huggingface dataset), and Jesse Dodge (for organizing the connection)." ]
[ "TAGS\n#task_categories-translation #size_categories-10K<n<100K #language-English #language-Sinhala #license-odc-by #arxiv-2205.12654 #arxiv-2207.04672 #region-us \n", "### Licensing Information\n\nThe dataset is released under the terms of ODC-BY. By using this, you are also bound to the respective Terms of Use and License of the original source.\n\n\n\n\nSchwenk et al, CCMatrix: Mining Billions of High-Quality Parallel Sentences on the Web. ACL URL\nHefferman et al, Bitext Mining Using Distilled Sentence Representations for Low-Resource Languages. Arxiv URL 2022.<br>\nNLLB Team et al, No Language Left Behind: Scaling Human-Centered Machine Translation, Arxiv URL 2022.", "### Contributions\n\nWe thank the NLLB Meta AI team for open sourcing the meta data and instructions on how to use it with special thanks to Bapi Akula, Pierre Andrews, Onur Çelebi, Sergey Edunov, Kenneth Heafield, Philipp Koehn, Alex Mourachko, Safiyyah Saleem, Holger Schwenk, and Guillaume Wenzek. We also thank the AllenNLP team at AI2 for hosting and releasing this data, including Akshita Bhagia (for engineering efforts to host the data, and create the huggingface dataset), and Jesse Dodge (for organizing the connection)." ]
a660da138ec1f622462e0d636f8f5cb497c43603
# Dataset Card for Evaluation run of allenai/tulu-2-dpo-70b <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [allenai/tulu-2-dpo-70b](https://huggingface.co/allenai/tulu-2-dpo-70b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_allenai__tulu-2-dpo-70b", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-02-02T06:48:43.589029](https://huggingface.co/datasets/open-llm-leaderboard/details_allenai__tulu-2-dpo-70b/blob/main/results_2024-02-02T06-48-43.589029.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.699296658680991, "acc_stderr": 0.03051571429129605, "acc_norm": 0.7020037559735633, "acc_norm_stderr": 0.031114133505086575, "mc1": 0.4675642594859241, "mc1_stderr": 0.017466632149577613, "mc2": 0.6577655722264159, "mc2_stderr": 0.014903281756393213 }, "harness|arc:challenge|25": { "acc": 0.6825938566552902, "acc_stderr": 0.013602239088038167, "acc_norm": 0.7209897610921502, "acc_norm_stderr": 0.01310678488360134 }, "harness|hellaswag|10": { "acc": 0.7082254530969926, "acc_stderr": 0.004536500714147989, "acc_norm": 0.8898625771758614, "acc_norm_stderr": 0.0031242116171988606 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6148148148148148, "acc_stderr": 0.04203921040156279, "acc_norm": 0.6148148148148148, "acc_norm_stderr": 0.04203921040156279 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.7828947368421053, "acc_stderr": 0.03355045304882924, "acc_norm": 0.7828947368421053, "acc_norm_stderr": 0.03355045304882924 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.73, "acc_stderr": 0.0446196043338474, "acc_norm": 0.73, "acc_norm_stderr": 0.0446196043338474 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7509433962264151, "acc_stderr": 0.02661648298050171, "acc_norm": 0.7509433962264151, "acc_norm_stderr": 0.02661648298050171 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.8263888888888888, "acc_stderr": 0.03167473383795718, "acc_norm": 0.8263888888888888, "acc_norm_stderr": 0.03167473383795718 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.51, "acc_stderr": 0.05024183937956911, "acc_norm": 0.51, "acc_norm_stderr": 0.05024183937956911 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.59, "acc_stderr": 0.04943110704237102, "acc_norm": 0.59, "acc_norm_stderr": 0.04943110704237102 },
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.39, "acc_stderr": 0.049020713000019756, "acc_norm": 0.39, "acc_norm_stderr": 0.049020713000019756 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.7225433526011561, "acc_stderr": 0.03414014007044037, "acc_norm": 0.7225433526011561, "acc_norm_stderr": 0.03414014007044037 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.43137254901960786, "acc_stderr": 0.04928099597287534, "acc_norm": 0.43137254901960786, "acc_norm_stderr": 0.04928099597287534 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.72, "acc_stderr": 0.045126085985421276, "acc_norm": 0.72, "acc_norm_stderr": 0.045126085985421276 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.6978723404255319, "acc_stderr": 0.03001755447188056, "acc_norm": 0.6978723404255319, "acc_norm_stderr": 0.03001755447188056 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.4473684210526316, "acc_stderr": 0.04677473004491199, "acc_norm": 0.4473684210526316, "acc_norm_stderr": 0.04677473004491199 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5793103448275863, "acc_stderr": 0.04113914981189261, "acc_norm": 0.5793103448275863, "acc_norm_stderr": 0.04113914981189261 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.455026455026455, "acc_stderr": 0.025646928361049398, "acc_norm": 0.455026455026455, "acc_norm_stderr": 0.025646928361049398 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.48412698412698413, "acc_stderr": 0.04469881854072606, "acc_norm": 0.48412698412698413, "acc_norm_stderr": 0.04469881854072606 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.52, "acc_stderr": 0.050211673156867795, "acc_norm": 0.52, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7935483870967742, "acc_stderr": 0.023025899617188716, "acc_norm": 0.7935483870967742, "acc_norm_stderr": 0.023025899617188716 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5270935960591133, "acc_stderr": 0.03512819077876106, "acc_norm": 0.5270935960591133, "acc_norm_stderr": 0.03512819077876106 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.78, "acc_stderr": 0.04163331998932261, "acc_norm": 0.78, "acc_norm_stderr": 0.04163331998932261 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.8363636363636363, "acc_stderr": 0.02888787239548795, "acc_norm": 0.8363636363636363, "acc_norm_stderr": 0.02888787239548795 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8888888888888888, "acc_stderr": 0.02239078763821676, "acc_norm": 0.8888888888888888, "acc_norm_stderr": 0.02239078763821676 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9378238341968912, "acc_stderr": 0.01742697415424052, "acc_norm": 0.9378238341968912, "acc_norm_stderr": 0.01742697415424052 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.7333333333333333, "acc_stderr": 0.022421273612923707, "acc_norm": 0.7333333333333333, "acc_norm_stderr": 0.022421273612923707 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3592592592592593, "acc_stderr": 0.029252905927251976, "acc_norm": 0.3592592592592593, "acc_norm_stderr": 0.029252905927251976 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.8025210084033614, "acc_stderr": 0.025859164122051453, "acc_norm": 0.8025210084033614, "acc_norm_stderr": 0.025859164122051453 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.47019867549668876, "acc_stderr": 
0.040752249922169775, "acc_norm": 0.47019867549668876, "acc_norm_stderr": 0.040752249922169775 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8935779816513761, "acc_stderr": 0.013221554674594372, "acc_norm": 0.8935779816513761, "acc_norm_stderr": 0.013221554674594372 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.6064814814814815, "acc_stderr": 0.03331747876370312, "acc_norm": 0.6064814814814815, "acc_norm_stderr": 0.03331747876370312 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.9166666666666666, "acc_stderr": 0.019398452135813905, "acc_norm": 0.9166666666666666, "acc_norm_stderr": 0.019398452135813905 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8607594936708861, "acc_stderr": 0.022535526352692705, "acc_norm": 0.8607594936708861, "acc_norm_stderr": 0.022535526352692705 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.7668161434977578, "acc_stderr": 0.028380391147094713, "acc_norm": 0.7668161434977578, "acc_norm_stderr": 0.028380391147094713 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.8320610687022901, "acc_stderr": 0.03278548537343138, "acc_norm": 0.8320610687022901, "acc_norm_stderr": 0.03278548537343138 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8677685950413223, "acc_stderr": 0.0309227883204458, "acc_norm": 0.8677685950413223, "acc_norm_stderr": 0.0309227883204458 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8425925925925926, "acc_stderr": 0.035207039905179635, "acc_norm": 0.8425925925925926, "acc_norm_stderr": 0.035207039905179635 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.8282208588957055, "acc_stderr": 0.029634717272371037, "acc_norm": 0.8282208588957055, "acc_norm_stderr": 0.029634717272371037 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.5, "acc_stderr": 0.04745789978762494, "acc_norm": 0.5, "acc_norm_stderr": 0.04745789978762494 }, "harness|hendrycksTest-management|5": { "acc": 0.8155339805825242, "acc_stderr": 0.03840423627288276, "acc_norm": 0.8155339805825242, "acc_norm_stderr": 0.03840423627288276 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8974358974358975, "acc_stderr": 0.019875655027867457, "acc_norm": 0.8974358974358975, "acc_norm_stderr": 0.019875655027867457 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.69, "acc_stderr": 0.04648231987117316, "acc_norm": 0.69, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8531289910600255, "acc_stderr": 0.012658201736147288, "acc_norm": 0.8531289910600255, "acc_norm_stderr": 0.012658201736147288 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7658959537572254, "acc_stderr": 0.022797110278071124, "acc_norm": 0.7658959537572254, "acc_norm_stderr": 0.022797110278071124 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.511731843575419, "acc_stderr": 0.016717897676932162, "acc_norm": 0.511731843575419, "acc_norm_stderr": 0.016717897676932162 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7843137254901961, "acc_stderr": 0.02355083135199509, "acc_norm": 0.7843137254901961, "acc_norm_stderr": 0.02355083135199509 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7620578778135049, "acc_stderr": 0.02418515064781871, "acc_norm": 0.7620578778135049, "acc_norm_stderr": 0.02418515064781871 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.8209876543209876, "acc_stderr": 0.02133086876212706, "acc_norm": 0.8209876543209876, "acc_norm_stderr": 0.02133086876212706 }, "harness|hendrycksTest-professional_accounting|5": { 
"acc": 0.574468085106383, "acc_stderr": 0.02949482760014436, "acc_norm": 0.574468085106383, "acc_norm_stderr": 0.02949482760014436 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.546284224250326, "acc_stderr": 0.012715404841277752, "acc_norm": 0.546284224250326, "acc_norm_stderr": 0.012715404841277752 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.75, "acc_stderr": 0.026303648393696036, "acc_norm": 0.75, "acc_norm_stderr": 0.026303648393696036 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.7598039215686274, "acc_stderr": 0.01728276069516741, "acc_norm": 0.7598039215686274, "acc_norm_stderr": 0.01728276069516741 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.7727272727272727, "acc_stderr": 0.04013964554072775, "acc_norm": 0.7727272727272727, "acc_norm_stderr": 0.04013964554072775 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7673469387755102, "acc_stderr": 0.027049257915896175, "acc_norm": 0.7673469387755102, "acc_norm_stderr": 0.027049257915896175 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8756218905472637, "acc_stderr": 0.023335401790166327, "acc_norm": 0.8756218905472637, "acc_norm_stderr": 0.023335401790166327 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.88, "acc_stderr": 0.03265986323710906, "acc_norm": 0.88, "acc_norm_stderr": 0.03265986323710906 }, "harness|hendrycksTest-virology|5": { "acc": 0.5120481927710844, "acc_stderr": 0.03891364495835817, "acc_norm": 0.5120481927710844, "acc_norm_stderr": 0.03891364495835817 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8713450292397661, "acc_stderr": 0.02567934272327691, "acc_norm": 0.8713450292397661, "acc_norm_stderr": 0.02567934272327691 }, "harness|truthfulqa:mc|0": { "mc1": 0.4675642594859241, "mc1_stderr": 0.017466632149577613, "mc2": 0.6577655722264159, "mc2_stderr": 0.014903281756393213 }, "harness|winogrande|5": { "acc": 0.8326756116811366, "acc_stderr": 0.010490608806828079 }, "harness|gsm8k|5": { "acc": 0.6262319939347991, "acc_stderr": 0.013326342860737007 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
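The card above explains that the "results" configuration stores the aggregated metrics; below is a hedged sketch of reading those aggregates, reusing the repository id from the card's own loading snippet. The "results" config name and "latest" split follow the convention shown in the config listings of these records, so verify they exist for this repository before relying on them.

```python
from datasets import load_dataset

# Aggregated metrics for the run; "latest" points at the most recent
# results_*.parquet file per the config listings in these records.
results = load_dataset("open-llm-leaderboard/details_allenai__tulu-2-dpo-70b",
                       "results",
                       split="latest")
print(results[0])
```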
open-llm-leaderboard/details_allenai__tulu-2-dpo-70b
[ "region:us" ]
2024-02-02T06:51:15+00:00
{"pretty_name": "Evaluation run of allenai/tulu-2-dpo-70b", "dataset_summary": "Dataset automatically created during the evaluation run of model [allenai/tulu-2-dpo-70b](https://huggingface.co/allenai/tulu-2-dpo-70b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_allenai__tulu-2-dpo-70b\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-02T06:48:43.589029](https://huggingface.co/datasets/open-llm-leaderboard/details_allenai__tulu-2-dpo-70b/blob/main/results_2024-02-02T06-48-43.589029.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.699296658680991,\n \"acc_stderr\": 0.03051571429129605,\n \"acc_norm\": 0.7020037559735633,\n \"acc_norm_stderr\": 0.031114133505086575,\n \"mc1\": 0.4675642594859241,\n \"mc1_stderr\": 0.017466632149577613,\n \"mc2\": 0.6577655722264159,\n \"mc2_stderr\": 0.014903281756393213\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6825938566552902,\n \"acc_stderr\": 0.013602239088038167,\n \"acc_norm\": 0.7209897610921502,\n \"acc_norm_stderr\": 0.01310678488360134\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.7082254530969926,\n \"acc_stderr\": 0.004536500714147989,\n \"acc_norm\": 0.8898625771758614,\n \"acc_norm_stderr\": 0.0031242116171988606\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6148148148148148,\n \"acc_stderr\": 0.04203921040156279,\n \"acc_norm\": 0.6148148148148148,\n \"acc_norm_stderr\": 0.04203921040156279\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.7828947368421053,\n \"acc_stderr\": 0.03355045304882924,\n \"acc_norm\": 0.7828947368421053,\n \"acc_norm_stderr\": 0.03355045304882924\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.73,\n \"acc_stderr\": 0.0446196043338474,\n \"acc_norm\": 0.73,\n \"acc_norm_stderr\": 0.0446196043338474\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7509433962264151,\n \"acc_stderr\": 0.02661648298050171,\n \"acc_norm\": 0.7509433962264151,\n \"acc_norm_stderr\": 0.02661648298050171\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.8263888888888888,\n \"acc_stderr\": 0.03167473383795718,\n \"acc_norm\": 0.8263888888888888,\n \"acc_norm_stderr\": 0.03167473383795718\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.51,\n \"acc_stderr\": 0.05024183937956911,\n 
\"acc_norm\": 0.51,\n \"acc_norm_stderr\": 0.05024183937956911\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.59,\n \"acc_stderr\": 0.04943110704237102,\n \"acc_norm\": 0.59,\n \"acc_norm_stderr\": 0.04943110704237102\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.39,\n \"acc_stderr\": 0.049020713000019756,\n \"acc_norm\": 0.39,\n \"acc_norm_stderr\": 0.049020713000019756\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.7225433526011561,\n \"acc_stderr\": 0.03414014007044037,\n \"acc_norm\": 0.7225433526011561,\n \"acc_norm_stderr\": 0.03414014007044037\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.43137254901960786,\n \"acc_stderr\": 0.04928099597287534,\n \"acc_norm\": 0.43137254901960786,\n \"acc_norm_stderr\": 0.04928099597287534\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.045126085985421276,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.045126085985421276\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.6978723404255319,\n \"acc_stderr\": 0.03001755447188056,\n \"acc_norm\": 0.6978723404255319,\n \"acc_norm_stderr\": 0.03001755447188056\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.4473684210526316,\n \"acc_stderr\": 0.04677473004491199,\n \"acc_norm\": 0.4473684210526316,\n \"acc_norm_stderr\": 0.04677473004491199\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5793103448275863,\n \"acc_stderr\": 0.04113914981189261,\n \"acc_norm\": 0.5793103448275863,\n \"acc_norm_stderr\": 0.04113914981189261\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.455026455026455,\n \"acc_stderr\": 0.025646928361049398,\n \"acc_norm\": 0.455026455026455,\n \"acc_norm_stderr\": 0.025646928361049398\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.48412698412698413,\n \"acc_stderr\": 0.04469881854072606,\n \"acc_norm\": 0.48412698412698413,\n \"acc_norm_stderr\": 0.04469881854072606\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.52,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.52,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7935483870967742,\n \"acc_stderr\": 0.023025899617188716,\n \"acc_norm\": 0.7935483870967742,\n \"acc_norm_stderr\": 0.023025899617188716\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5270935960591133,\n \"acc_stderr\": 0.03512819077876106,\n \"acc_norm\": 0.5270935960591133,\n \"acc_norm_stderr\": 0.03512819077876106\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.78,\n \"acc_stderr\": 0.04163331998932261,\n \"acc_norm\": 0.78,\n \"acc_norm_stderr\": 0.04163331998932261\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.8363636363636363,\n \"acc_stderr\": 0.02888787239548795,\n \"acc_norm\": 0.8363636363636363,\n \"acc_norm_stderr\": 0.02888787239548795\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8888888888888888,\n \"acc_stderr\": 0.02239078763821676,\n \"acc_norm\": 0.8888888888888888,\n \"acc_norm_stderr\": 0.02239078763821676\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9378238341968912,\n \"acc_stderr\": 0.01742697415424052,\n \"acc_norm\": 0.9378238341968912,\n \"acc_norm_stderr\": 0.01742697415424052\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 
0.7333333333333333,\n \"acc_stderr\": 0.022421273612923707,\n \"acc_norm\": 0.7333333333333333,\n \"acc_norm_stderr\": 0.022421273612923707\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3592592592592593,\n \"acc_stderr\": 0.029252905927251976,\n \"acc_norm\": 0.3592592592592593,\n \"acc_norm_stderr\": 0.029252905927251976\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.8025210084033614,\n \"acc_stderr\": 0.025859164122051453,\n \"acc_norm\": 0.8025210084033614,\n \"acc_norm_stderr\": 0.025859164122051453\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.47019867549668876,\n \"acc_stderr\": 0.040752249922169775,\n \"acc_norm\": 0.47019867549668876,\n \"acc_norm_stderr\": 0.040752249922169775\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8935779816513761,\n \"acc_stderr\": 0.013221554674594372,\n \"acc_norm\": 0.8935779816513761,\n \"acc_norm_stderr\": 0.013221554674594372\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.6064814814814815,\n \"acc_stderr\": 0.03331747876370312,\n \"acc_norm\": 0.6064814814814815,\n \"acc_norm_stderr\": 0.03331747876370312\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.9166666666666666,\n \"acc_stderr\": 0.019398452135813905,\n \"acc_norm\": 0.9166666666666666,\n \"acc_norm_stderr\": 0.019398452135813905\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8607594936708861,\n \"acc_stderr\": 0.022535526352692705,\n \"acc_norm\": 0.8607594936708861,\n \"acc_norm_stderr\": 0.022535526352692705\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.7668161434977578,\n \"acc_stderr\": 0.028380391147094713,\n \"acc_norm\": 0.7668161434977578,\n \"acc_norm_stderr\": 0.028380391147094713\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.8320610687022901,\n \"acc_stderr\": 0.03278548537343138,\n \"acc_norm\": 0.8320610687022901,\n \"acc_norm_stderr\": 0.03278548537343138\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8677685950413223,\n \"acc_stderr\": 0.0309227883204458,\n \"acc_norm\": 0.8677685950413223,\n \"acc_norm_stderr\": 0.0309227883204458\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8425925925925926,\n \"acc_stderr\": 0.035207039905179635,\n \"acc_norm\": 0.8425925925925926,\n \"acc_norm_stderr\": 0.035207039905179635\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.8282208588957055,\n \"acc_stderr\": 0.029634717272371037,\n \"acc_norm\": 0.8282208588957055,\n \"acc_norm_stderr\": 0.029634717272371037\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.04745789978762494,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.04745789978762494\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8155339805825242,\n \"acc_stderr\": 0.03840423627288276,\n \"acc_norm\": 0.8155339805825242,\n \"acc_norm_stderr\": 0.03840423627288276\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8974358974358975,\n \"acc_stderr\": 0.019875655027867457,\n \"acc_norm\": 0.8974358974358975,\n \"acc_norm_stderr\": 0.019875655027867457\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8531289910600255,\n \"acc_stderr\": 0.012658201736147288,\n \"acc_norm\": 0.8531289910600255,\n 
\"acc_norm_stderr\": 0.012658201736147288\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7658959537572254,\n \"acc_stderr\": 0.022797110278071124,\n \"acc_norm\": 0.7658959537572254,\n \"acc_norm_stderr\": 0.022797110278071124\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.511731843575419,\n \"acc_stderr\": 0.016717897676932162,\n \"acc_norm\": 0.511731843575419,\n \"acc_norm_stderr\": 0.016717897676932162\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7843137254901961,\n \"acc_stderr\": 0.02355083135199509,\n \"acc_norm\": 0.7843137254901961,\n \"acc_norm_stderr\": 0.02355083135199509\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7620578778135049,\n \"acc_stderr\": 0.02418515064781871,\n \"acc_norm\": 0.7620578778135049,\n \"acc_norm_stderr\": 0.02418515064781871\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.8209876543209876,\n \"acc_stderr\": 0.02133086876212706,\n \"acc_norm\": 0.8209876543209876,\n \"acc_norm_stderr\": 0.02133086876212706\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.574468085106383,\n \"acc_stderr\": 0.02949482760014436,\n \"acc_norm\": 0.574468085106383,\n \"acc_norm_stderr\": 0.02949482760014436\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.546284224250326,\n \"acc_stderr\": 0.012715404841277752,\n \"acc_norm\": 0.546284224250326,\n \"acc_norm_stderr\": 0.012715404841277752\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.026303648393696036,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.026303648393696036\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.7598039215686274,\n \"acc_stderr\": 0.01728276069516741,\n \"acc_norm\": 0.7598039215686274,\n \"acc_norm_stderr\": 0.01728276069516741\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.7727272727272727,\n \"acc_stderr\": 0.04013964554072775,\n \"acc_norm\": 0.7727272727272727,\n \"acc_norm_stderr\": 0.04013964554072775\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7673469387755102,\n \"acc_stderr\": 0.027049257915896175,\n \"acc_norm\": 0.7673469387755102,\n \"acc_norm_stderr\": 0.027049257915896175\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8756218905472637,\n \"acc_stderr\": 0.023335401790166327,\n \"acc_norm\": 0.8756218905472637,\n \"acc_norm_stderr\": 0.023335401790166327\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.88,\n \"acc_stderr\": 0.03265986323710906,\n \"acc_norm\": 0.88,\n \"acc_norm_stderr\": 0.03265986323710906\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5120481927710844,\n \"acc_stderr\": 0.03891364495835817,\n \"acc_norm\": 0.5120481927710844,\n \"acc_norm_stderr\": 0.03891364495835817\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8713450292397661,\n \"acc_stderr\": 0.02567934272327691,\n \"acc_norm\": 0.8713450292397661,\n \"acc_norm_stderr\": 0.02567934272327691\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.4675642594859241,\n \"mc1_stderr\": 0.017466632149577613,\n \"mc2\": 0.6577655722264159,\n \"mc2_stderr\": 0.014903281756393213\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8326756116811366,\n \"acc_stderr\": 0.010490608806828079\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.6262319939347991,\n \"acc_stderr\": 0.013326342860737007\n }\n}\n```", "repo_url": "https://huggingface.co/allenai/tulu-2-dpo-70b", "leaderboard_url": 
"https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_02T06_48_43.589029", "path": ["**/details_harness|arc:challenge|25_2024-02-02T06-48-43.589029.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-02T06-48-43.589029.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_02T06_48_43.589029", "path": ["**/details_harness|gsm8k|5_2024-02-02T06-48-43.589029.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-02T06-48-43.589029.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_02T06_48_43.589029", "path": ["**/details_harness|hellaswag|10_2024-02-02T06-48-43.589029.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-02T06-48-43.589029.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_02T06_48_43.589029", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T06-48-43.589029.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T06-48-43.589029.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T06-48-43.589029.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-02T06-48-43.589029.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T06-48-43.589029.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T06-48-43.589029.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_02T06_48_43.589029", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T06-48-43.589029.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T06-48-43.589029.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_02T06_48_43.589029", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T06-48-43.589029.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T06-48-43.589029.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_02T06_48_43.589029", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T06-48-43.589029.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T06-48-43.589029.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_02T06_48_43.589029", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T06-48-43.589029.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T06-48-43.589029.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_02T06_48_43.589029", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T06-48-43.589029.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T06-48-43.589029.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_02T06_48_43.589029", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T06-48-43.589029.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T06-48-43.589029.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_02T06_48_43.589029", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T06-48-43.589029.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T06-48-43.589029.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_02_02T06_48_43.589029", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T06-48-43.589029.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T06-48-43.589029.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_02T06_48_43.589029", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T06-48-43.589029.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T06-48-43.589029.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_02T06_48_43.589029", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T06-48-43.589029.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T06-48-43.589029.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_02T06_48_43.589029", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T06-48-43.589029.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T06-48-43.589029.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_02T06_48_43.589029", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T06-48-43.589029.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T06-48-43.589029.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_02T06_48_43.589029", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T06-48-43.589029.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T06-48-43.589029.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_02T06_48_43.589029", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T06-48-43.589029.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T06-48-43.589029.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_02T06_48_43.589029", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T06-48-43.589029.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T06-48-43.589029.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_02T06_48_43.589029", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T06-48-43.589029.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T06-48-43.589029.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_02T06_48_43.589029", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T06-48-43.589029.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T06-48-43.589029.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_02T06_48_43.589029", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T06-48-43.589029.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T06-48-43.589029.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_02T06_48_43.589029", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T06-48-43.589029.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T06-48-43.589029.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_02T06_48_43.589029", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T06-48-43.589029.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T06-48-43.589029.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_02T06_48_43.589029", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T06-48-43.589029.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T06-48-43.589029.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_02T06_48_43.589029", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T06-48-43.589029.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T06-48-43.589029.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_02T06_48_43.589029", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T06-48-43.589029.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T06-48-43.589029.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_02T06_48_43.589029", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T06-48-43.589029.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T06-48-43.589029.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_02T06_48_43.589029", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T06-48-43.589029.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T06-48-43.589029.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_02T06_48_43.589029", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T06-48-43.589029.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T06-48-43.589029.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_02T06_48_43.589029", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T06-48-43.589029.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T06-48-43.589029.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_02T06_48_43.589029", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T06-48-43.589029.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T06-48-43.589029.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_02T06_48_43.589029", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T06-48-43.589029.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T06-48-43.589029.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_02T06_48_43.589029", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T06-48-43.589029.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T06-48-43.589029.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_02T06_48_43.589029", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T06-48-43.589029.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T06-48-43.589029.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_02T06_48_43.589029", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T06-48-43.589029.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T06-48-43.589029.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_02T06_48_43.589029", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T06-48-43.589029.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T06-48-43.589029.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_02T06_48_43.589029", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T06-48-43.589029.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T06-48-43.589029.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_02T06_48_43.589029", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T06-48-43.589029.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T06-48-43.589029.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_02T06_48_43.589029", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T06-48-43.589029.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T06-48-43.589029.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_02T06_48_43.589029", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T06-48-43.589029.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T06-48-43.589029.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_02_02T06_48_43.589029", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T06-48-43.589029.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T06-48-43.589029.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_02T06_48_43.589029", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T06-48-43.589029.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T06-48-43.589029.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_02T06_48_43.589029", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T06-48-43.589029.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T06-48-43.589029.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_02T06_48_43.589029", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T06-48-43.589029.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T06-48-43.589029.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_02T06_48_43.589029", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T06-48-43.589029.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T06-48-43.589029.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_02T06_48_43.589029", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T06-48-43.589029.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T06-48-43.589029.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_02T06_48_43.589029", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T06-48-43.589029.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T06-48-43.589029.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_02T06_48_43.589029", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T06-48-43.589029.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T06-48-43.589029.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_02T06_48_43.589029", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T06-48-43.589029.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T06-48-43.589029.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_02T06_48_43.589029", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T06-48-43.589029.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T06-48-43.589029.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_02T06_48_43.589029", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T06-48-43.589029.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T06-48-43.589029.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_02T06_48_43.589029", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T06-48-43.589029.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T06-48-43.589029.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_02T06_48_43.589029", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T06-48-43.589029.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T06-48-43.589029.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_02T06_48_43.589029", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T06-48-43.589029.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T06-48-43.589029.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_02T06_48_43.589029", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T06-48-43.589029.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T06-48-43.589029.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_02T06_48_43.589029", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T06-48-43.589029.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T06-48-43.589029.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_02T06_48_43.589029", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T06-48-43.589029.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T06-48-43.589029.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_02T06_48_43.589029", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T06-48-43.589029.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T06-48-43.589029.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_02T06_48_43.589029", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T06-48-43.589029.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T06-48-43.589029.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_02T06_48_43.589029", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T06-48-43.589029.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T06-48-43.589029.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_02T06_48_43.589029", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T06-48-43.589029.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T06-48-43.589029.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_02T06_48_43.589029", "path": ["**/details_harness|winogrande|5_2024-02-02T06-48-43.589029.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-02-02T06-48-43.589029.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_02_02T06_48_43.589029", "path": ["results_2024-02-02T06-48-43.589029.parquet"]}, {"split": "latest", "path": 
["results_2024-02-02T06-48-43.589029.parquet"]}]}]}
2024-02-02T06:51:40+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of allenai/tulu-2-dpo-70b Dataset automatically created during the evaluation run of model allenai/tulu-2-dpo-70b on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following (see the loading sketch after this card): ## Latest results These are the latest results from run 2024-02-02T06:48:43.589029 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
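A minimal loading sketch for the card above, using the same assumed dataset id as in the sketch following the metadata (inferred from the repo_url, not quoted from this record):

```python
from datasets import load_dataset

# Assumed dataset id, following the details_<org>__<model> convention used by
# the other cards in this dump.
data = load_dataset(
    "open-llm-leaderboard/details_allenai__tulu-2-dpo-70b",
    "harness_winogrande_5",
    split="train",
)
```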
[ "# Dataset Card for Evaluation run of allenai/tulu-2-dpo-70b\n\n\n\nDataset automatically created during the evaluation run of model allenai/tulu-2-dpo-70b on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T06:48:43.589029(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of allenai/tulu-2-dpo-70b\n\n\n\nDataset automatically created during the evaluation run of model allenai/tulu-2-dpo-70b on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T06:48:43.589029(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
57f9aa4aa36b9ba2bf0f61c73f487aa1c93050fd
# Dataset Card for Evaluation run of TomGrc/FusionNet_34Bx2_MoE_v0.1 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [TomGrc/FusionNet_34Bx2_MoE_v0.1](https://huggingface.co/TomGrc/FusionNet_34Bx2_MoE_v0.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_TomGrc__FusionNet_34Bx2_MoE_v0.1", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-02-02T06:58:32.819028](https://huggingface.co/datasets/open-llm-leaderboard/details_TomGrc__FusionNet_34Bx2_MoE_v0.1/blob/main/results_2024-02-02T06-58-32.819028.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.7652072421659043, "acc_stderr": 0.028262097051159108, "acc_norm": 0.7683507533493169, "acc_norm_stderr": 0.02880792714832569, "mc1": 0.5483476132190942, "mc1_stderr": 0.01742148030027764, "mc2": 0.7100760148764914, "mc2_stderr": 0.01415531370772675 }, "harness|arc:challenge|25": { "acc": 0.7141638225255973, "acc_stderr": 0.013203196088537369, "acc_norm": 0.7372013651877133, "acc_norm_stderr": 0.012862523175351333 }, "harness|hellaswag|10": { "acc": 0.668990240987851, "acc_stderr": 0.004696148339570979, "acc_norm": 0.8645688109938259, "acc_norm_stderr": 0.0034148422365171 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.49, "acc_stderr": 0.05024183937956912, "acc_norm": 0.49, "acc_norm_stderr": 0.05024183937956912 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.7481481481481481, "acc_stderr": 0.03749850709174021, "acc_norm": 0.7481481481481481, "acc_norm_stderr": 0.03749850709174021 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.875, "acc_stderr": 0.026913523521537846, "acc_norm": 0.875, "acc_norm_stderr": 0.026913523521537846 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.76, "acc_stderr": 0.04292346959909283, "acc_norm": 0.76, "acc_norm_stderr": 0.04292346959909283 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.8113207547169812, "acc_stderr": 0.024079995130062253, "acc_norm": 0.8113207547169812, "acc_norm_stderr": 0.024079995130062253 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.9027777777777778, "acc_stderr": 0.024774516250440182, "acc_norm": 0.9027777777777778, "acc_norm_stderr": 0.024774516250440182 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.55, "acc_stderr": 0.04999999999999999, "acc_norm": 0.55, "acc_norm_stderr": 0.04999999999999999 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.59, "acc_stderr": 0.04943110704237101, "acc_norm": 0.59, "acc_norm_stderr": 0.04943110704237101 },
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.7109826589595376, "acc_stderr": 0.034564257450869995, "acc_norm": 0.7109826589595376, "acc_norm_stderr": 0.034564257450869995 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.5392156862745098, "acc_stderr": 0.04959859966384181, "acc_norm": 0.5392156862745098, "acc_norm_stderr": 0.04959859966384181 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.79, "acc_stderr": 0.04093601807403326, "acc_norm": 0.79, "acc_norm_stderr": 0.04093601807403326 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.7659574468085106, "acc_stderr": 0.027678452578212394, "acc_norm": 0.7659574468085106, "acc_norm_stderr": 0.027678452578212394 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.6052631578947368, "acc_stderr": 0.04598188057816542, "acc_norm": 0.6052631578947368, "acc_norm_stderr": 0.04598188057816542 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.7517241379310344, "acc_stderr": 0.036001056927277696, "acc_norm": 0.7517241379310344, "acc_norm_stderr": 0.036001056927277696 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.7513227513227513, "acc_stderr": 0.02226181769240016, "acc_norm": 0.7513227513227513, "acc_norm_stderr": 0.02226181769240016 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.5396825396825397, "acc_stderr": 0.04458029125470973, "acc_norm": 0.5396825396825397, "acc_norm_stderr": 0.04458029125470973 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.6, "acc_stderr": 0.049236596391733084, "acc_norm": 0.6, "acc_norm_stderr": 0.049236596391733084 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.9096774193548387, "acc_stderr": 0.01630657064448831, "acc_norm": 0.9096774193548387, "acc_norm_stderr": 0.01630657064448831 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.6798029556650246, "acc_stderr": 0.03282649385304151, "acc_norm": 0.6798029556650246, "acc_norm_stderr": 0.03282649385304151 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.77, "acc_stderr": 0.042295258468165044, "acc_norm": 0.77, "acc_norm_stderr": 0.042295258468165044 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.8787878787878788, "acc_stderr": 0.025485498373343237, "acc_norm": 0.8787878787878788, "acc_norm_stderr": 0.025485498373343237 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.9292929292929293, "acc_stderr": 0.01826310542019949, "acc_norm": 0.9292929292929293, "acc_norm_stderr": 0.01826310542019949 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9740932642487047, "acc_stderr": 0.011464523356953162, "acc_norm": 0.9740932642487047, "acc_norm_stderr": 0.011464523356953162 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.8076923076923077, "acc_stderr": 0.019982347208637306, "acc_norm": 0.8076923076923077, "acc_norm_stderr": 0.019982347208637306 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.45185185185185184, "acc_stderr": 0.03034386299851262, "acc_norm": 0.45185185185185184, "acc_norm_stderr": 0.03034386299851262 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.8487394957983193, "acc_stderr": 0.02327425589870796, "acc_norm": 0.8487394957983193, "acc_norm_stderr": 0.02327425589870796 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.5231788079470199, "acc_stderr": 
0.04078093859163085, "acc_norm": 0.5231788079470199, "acc_norm_stderr": 0.04078093859163085 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.9211009174311927, "acc_stderr": 0.011558198113769598, "acc_norm": 0.9211009174311927, "acc_norm_stderr": 0.011558198113769598 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.6666666666666666, "acc_stderr": 0.03214952147802749, "acc_norm": 0.6666666666666666, "acc_norm_stderr": 0.03214952147802749 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.9264705882352942, "acc_stderr": 0.018318855850089678, "acc_norm": 0.9264705882352942, "acc_norm_stderr": 0.018318855850089678 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.9113924050632911, "acc_stderr": 0.018498315206865384, "acc_norm": 0.9113924050632911, "acc_norm_stderr": 0.018498315206865384 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.8071748878923767, "acc_stderr": 0.026478240960489365, "acc_norm": 0.8071748878923767, "acc_norm_stderr": 0.026478240960489365 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.8702290076335878, "acc_stderr": 0.029473649496907065, "acc_norm": 0.8702290076335878, "acc_norm_stderr": 0.029473649496907065 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8677685950413223, "acc_stderr": 0.0309227883204458, "acc_norm": 0.8677685950413223, "acc_norm_stderr": 0.0309227883204458 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8981481481481481, "acc_stderr": 0.02923927267563275, "acc_norm": 0.8981481481481481, "acc_norm_stderr": 0.02923927267563275 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.8711656441717791, "acc_stderr": 0.026321383198783674, "acc_norm": 0.8711656441717791, "acc_norm_stderr": 0.026321383198783674 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.5535714285714286, "acc_stderr": 0.047184714852195865, "acc_norm": 0.5535714285714286, "acc_norm_stderr": 0.047184714852195865 }, "harness|hendrycksTest-management|5": { "acc": 0.8737864077669902, "acc_stderr": 0.03288180278808629, "acc_norm": 0.8737864077669902, "acc_norm_stderr": 0.03288180278808629 }, "harness|hendrycksTest-marketing|5": { "acc": 0.9444444444444444, "acc_stderr": 0.01500631280644693, "acc_norm": 0.9444444444444444, "acc_norm_stderr": 0.01500631280644693 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.86, "acc_stderr": 0.0348735088019777, "acc_norm": 0.86, "acc_norm_stderr": 0.0348735088019777 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.913154533844189, "acc_stderr": 0.01007029837774778, "acc_norm": 0.913154533844189, "acc_norm_stderr": 0.01007029837774778 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.8294797687861272, "acc_stderr": 0.020247961569303728, "acc_norm": 0.8294797687861272, "acc_norm_stderr": 0.020247961569303728 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.7988826815642458, "acc_stderr": 0.013405946402609044, "acc_norm": 0.7988826815642458, "acc_norm_stderr": 0.013405946402609044 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.8431372549019608, "acc_stderr": 0.02082375883758091, "acc_norm": 0.8431372549019608, "acc_norm_stderr": 0.02082375883758091 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.8006430868167203, "acc_stderr": 0.022691033780549656, "acc_norm": 0.8006430868167203, "acc_norm_stderr": 0.022691033780549656 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.8672839506172839, "acc_stderr": 0.01887735383957184, "acc_norm": 0.8672839506172839, "acc_norm_stderr": 0.01887735383957184 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.6418439716312057, "acc_stderr": 0.028602085862759422, "acc_norm": 0.6418439716312057, "acc_norm_stderr": 0.028602085862759422 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.5880052151238592, "acc_stderr": 0.01257087103214607, "acc_norm": 0.5880052151238592, "acc_norm_stderr": 0.01257087103214607 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.8345588235294118, "acc_stderr": 0.02257177102549475, "acc_norm": 0.8345588235294118, "acc_norm_stderr": 0.02257177102549475 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.8169934640522876, "acc_stderr": 0.01564306991127334, "acc_norm": 0.8169934640522876, "acc_norm_stderr": 0.01564306991127334 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.7272727272727273, "acc_stderr": 0.04265792110940589, "acc_norm": 0.7272727272727273, "acc_norm_stderr": 0.04265792110940589 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.8489795918367347, "acc_stderr": 0.022923004094736847, "acc_norm": 0.8489795918367347, "acc_norm_stderr": 0.022923004094736847 }, "harness|hendrycksTest-sociology|5": { "acc": 0.9104477611940298, "acc_stderr": 0.02019067053502792, "acc_norm": 0.9104477611940298, "acc_norm_stderr": 0.02019067053502792 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.92, "acc_stderr": 0.0272659924344291, "acc_norm": 0.92, "acc_norm_stderr": 0.0272659924344291 }, "harness|hendrycksTest-virology|5": { "acc": 0.5783132530120482, "acc_stderr": 0.03844453181770917, "acc_norm": 0.5783132530120482, "acc_norm_stderr": 0.03844453181770917 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8654970760233918, "acc_stderr": 0.0261682213446623, "acc_norm": 0.8654970760233918, "acc_norm_stderr": 0.0261682213446623 }, "harness|truthfulqa:mc|0": { "mc1": 0.5483476132190942, "mc1_stderr": 0.01742148030027764, "mc2": 0.7100760148764914, "mc2_stderr": 0.01415531370772675 }, "harness|winogrande|5": { "acc": 0.8334648776637726, "acc_stderr": 0.010470796496781093 }, "harness|gsm8k|5": { "acc": 0.730098559514784, "acc_stderr": 0.012227442856468896 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
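Assuming this dataset exposes the same `results` configuration and `latest` split as the record above, a minimal sketch for pulling the aggregated metrics:

```python
from datasets import load_dataset

# The "results" config stores the aggregated run metrics; "latest" is assumed
# to point at the most recent evaluation, as in the record above.
results = load_dataset(
    "open-llm-leaderboard/details_TomGrc__FusionNet_34Bx2_MoE_v0.1",
    "results",
    split="latest",
)
print(results[0])  # should mirror the aggregated scores in the JSON above
```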
open-llm-leaderboard/details_TomGrc__FusionNet_34Bx2_MoE_v0.1
[ "region:us" ]
2024-02-02T07:00:45+00:00
{"pretty_name": "Evaluation run of TomGrc/FusionNet_34Bx2_MoE_v0.1", "dataset_summary": "Dataset automatically created during the evaluation run of model [TomGrc/FusionNet_34Bx2_MoE_v0.1](https://huggingface.co/TomGrc/FusionNet_34Bx2_MoE_v0.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_TomGrc__FusionNet_34Bx2_MoE_v0.1\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-02T06:58:32.819028](https://huggingface.co/datasets/open-llm-leaderboard/details_TomGrc__FusionNet_34Bx2_MoE_v0.1/blob/main/results_2024-02-02T06-58-32.819028.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.7652072421659043,\n \"acc_stderr\": 0.028262097051159108,\n \"acc_norm\": 0.7683507533493169,\n \"acc_norm_stderr\": 0.02880792714832569,\n \"mc1\": 0.5483476132190942,\n \"mc1_stderr\": 0.01742148030027764,\n \"mc2\": 0.7100760148764914,\n \"mc2_stderr\": 0.01415531370772675\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.7141638225255973,\n \"acc_stderr\": 0.013203196088537369,\n \"acc_norm\": 0.7372013651877133,\n \"acc_norm_stderr\": 0.012862523175351333\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.668990240987851,\n \"acc_stderr\": 0.004696148339570979,\n \"acc_norm\": 0.8645688109938259,\n \"acc_norm_stderr\": 0.0034148422365171\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.49,\n \"acc_stderr\": 0.05024183937956912,\n \"acc_norm\": 0.49,\n \"acc_norm_stderr\": 0.05024183937956912\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.7481481481481481,\n \"acc_stderr\": 0.03749850709174021,\n \"acc_norm\": 0.7481481481481481,\n \"acc_norm_stderr\": 0.03749850709174021\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.875,\n \"acc_stderr\": 0.026913523521537846,\n \"acc_norm\": 0.875,\n \"acc_norm_stderr\": 0.026913523521537846\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.76,\n \"acc_stderr\": 0.04292346959909283,\n \"acc_norm\": 0.76,\n \"acc_norm_stderr\": 0.04292346959909283\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.8113207547169812,\n \"acc_stderr\": 0.024079995130062253,\n \"acc_norm\": 0.8113207547169812,\n \"acc_norm_stderr\": 0.024079995130062253\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.9027777777777778,\n \"acc_stderr\": 0.024774516250440182,\n \"acc_norm\": 0.9027777777777778,\n \"acc_norm_stderr\": 0.024774516250440182\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.55,\n \"acc_stderr\": 
0.04999999999999999,\n \"acc_norm\": 0.55,\n \"acc_norm_stderr\": 0.04999999999999999\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.59,\n \"acc_stderr\": 0.04943110704237101,\n \"acc_norm\": 0.59,\n \"acc_norm_stderr\": 0.04943110704237101\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.42,\n \"acc_stderr\": 0.049604496374885836,\n \"acc_norm\": 0.42,\n \"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.7109826589595376,\n \"acc_stderr\": 0.034564257450869995,\n \"acc_norm\": 0.7109826589595376,\n \"acc_norm_stderr\": 0.034564257450869995\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.5392156862745098,\n \"acc_stderr\": 0.04959859966384181,\n \"acc_norm\": 0.5392156862745098,\n \"acc_norm_stderr\": 0.04959859966384181\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.79,\n \"acc_stderr\": 0.04093601807403326,\n \"acc_norm\": 0.79,\n \"acc_norm_stderr\": 0.04093601807403326\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.7659574468085106,\n \"acc_stderr\": 0.027678452578212394,\n \"acc_norm\": 0.7659574468085106,\n \"acc_norm_stderr\": 0.027678452578212394\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.6052631578947368,\n \"acc_stderr\": 0.04598188057816542,\n \"acc_norm\": 0.6052631578947368,\n \"acc_norm_stderr\": 0.04598188057816542\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.7517241379310344,\n \"acc_stderr\": 0.036001056927277696,\n \"acc_norm\": 0.7517241379310344,\n \"acc_norm_stderr\": 0.036001056927277696\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.7513227513227513,\n \"acc_stderr\": 0.02226181769240016,\n \"acc_norm\": 0.7513227513227513,\n \"acc_norm_stderr\": 0.02226181769240016\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.5396825396825397,\n \"acc_stderr\": 0.04458029125470973,\n \"acc_norm\": 0.5396825396825397,\n \"acc_norm_stderr\": 0.04458029125470973\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.6,\n \"acc_stderr\": 0.049236596391733084,\n \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.049236596391733084\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.9096774193548387,\n \"acc_stderr\": 0.01630657064448831,\n \"acc_norm\": 0.9096774193548387,\n \"acc_norm_stderr\": 0.01630657064448831\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.6798029556650246,\n \"acc_stderr\": 0.03282649385304151,\n \"acc_norm\": 0.6798029556650246,\n \"acc_norm_stderr\": 0.03282649385304151\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.77,\n \"acc_stderr\": 0.042295258468165044,\n \"acc_norm\": 0.77,\n \"acc_norm_stderr\": 0.042295258468165044\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.8787878787878788,\n \"acc_stderr\": 0.025485498373343237,\n \"acc_norm\": 0.8787878787878788,\n \"acc_norm_stderr\": 0.025485498373343237\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.9292929292929293,\n \"acc_stderr\": 0.01826310542019949,\n \"acc_norm\": 0.9292929292929293,\n \"acc_norm_stderr\": 0.01826310542019949\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9740932642487047,\n \"acc_stderr\": 0.011464523356953162,\n \"acc_norm\": 0.9740932642487047,\n \"acc_norm_stderr\": 0.011464523356953162\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.8076923076923077,\n \"acc_stderr\": 0.019982347208637306,\n \"acc_norm\": 0.8076923076923077,\n \"acc_norm_stderr\": 0.019982347208637306\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.45185185185185184,\n \"acc_stderr\": 0.03034386299851262,\n \"acc_norm\": 0.45185185185185184,\n \"acc_norm_stderr\": 0.03034386299851262\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.8487394957983193,\n \"acc_stderr\": 0.02327425589870796,\n \"acc_norm\": 0.8487394957983193,\n \"acc_norm_stderr\": 0.02327425589870796\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.5231788079470199,\n \"acc_stderr\": 0.04078093859163085,\n \"acc_norm\": 0.5231788079470199,\n \"acc_norm_stderr\": 0.04078093859163085\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.9211009174311927,\n \"acc_stderr\": 0.011558198113769598,\n \"acc_norm\": 0.9211009174311927,\n \"acc_norm_stderr\": 0.011558198113769598\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.6666666666666666,\n \"acc_stderr\": 0.03214952147802749,\n \"acc_norm\": 0.6666666666666666,\n \"acc_norm_stderr\": 0.03214952147802749\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.9264705882352942,\n \"acc_stderr\": 0.018318855850089678,\n \"acc_norm\": 0.9264705882352942,\n \"acc_norm_stderr\": 0.018318855850089678\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.9113924050632911,\n \"acc_stderr\": 0.018498315206865384,\n \"acc_norm\": 0.9113924050632911,\n \"acc_norm_stderr\": 0.018498315206865384\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.8071748878923767,\n \"acc_stderr\": 0.026478240960489365,\n \"acc_norm\": 0.8071748878923767,\n \"acc_norm_stderr\": 0.026478240960489365\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.8702290076335878,\n \"acc_stderr\": 0.029473649496907065,\n \"acc_norm\": 0.8702290076335878,\n \"acc_norm_stderr\": 0.029473649496907065\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8677685950413223,\n \"acc_stderr\": 0.0309227883204458,\n \"acc_norm\": 0.8677685950413223,\n \"acc_norm_stderr\": 0.0309227883204458\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8981481481481481,\n \"acc_stderr\": 0.02923927267563275,\n \"acc_norm\": 0.8981481481481481,\n \"acc_norm_stderr\": 0.02923927267563275\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.8711656441717791,\n \"acc_stderr\": 0.026321383198783674,\n \"acc_norm\": 0.8711656441717791,\n \"acc_norm_stderr\": 0.026321383198783674\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.5535714285714286,\n \"acc_stderr\": 0.047184714852195865,\n \"acc_norm\": 0.5535714285714286,\n \"acc_norm_stderr\": 0.047184714852195865\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8737864077669902,\n \"acc_stderr\": 0.03288180278808629,\n \"acc_norm\": 0.8737864077669902,\n \"acc_norm_stderr\": 0.03288180278808629\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.9444444444444444,\n \"acc_stderr\": 0.01500631280644693,\n \"acc_norm\": 0.9444444444444444,\n \"acc_norm_stderr\": 0.01500631280644693\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.86,\n \"acc_stderr\": 0.0348735088019777,\n \"acc_norm\": 0.86,\n \"acc_norm_stderr\": 0.0348735088019777\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.913154533844189,\n \"acc_stderr\": 0.01007029837774778,\n \"acc_norm\": 0.913154533844189,\n \"acc_norm_stderr\": 0.01007029837774778\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.8294797687861272,\n \"acc_stderr\": 0.020247961569303728,\n \"acc_norm\": 0.8294797687861272,\n \"acc_norm_stderr\": 0.020247961569303728\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.7988826815642458,\n \"acc_stderr\": 0.013405946402609044,\n \"acc_norm\": 0.7988826815642458,\n \"acc_norm_stderr\": 0.013405946402609044\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.8431372549019608,\n \"acc_stderr\": 0.02082375883758091,\n \"acc_norm\": 0.8431372549019608,\n \"acc_norm_stderr\": 0.02082375883758091\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.8006430868167203,\n \"acc_stderr\": 0.022691033780549656,\n \"acc_norm\": 0.8006430868167203,\n \"acc_norm_stderr\": 0.022691033780549656\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.8672839506172839,\n \"acc_stderr\": 0.01887735383957184,\n \"acc_norm\": 0.8672839506172839,\n \"acc_norm_stderr\": 0.01887735383957184\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.6418439716312057,\n \"acc_stderr\": 0.028602085862759422,\n \"acc_norm\": 0.6418439716312057,\n \"acc_norm_stderr\": 0.028602085862759422\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.5880052151238592,\n \"acc_stderr\": 0.01257087103214607,\n \"acc_norm\": 0.5880052151238592,\n \"acc_norm_stderr\": 0.01257087103214607\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.8345588235294118,\n \"acc_stderr\": 0.02257177102549475,\n \"acc_norm\": 0.8345588235294118,\n \"acc_norm_stderr\": 0.02257177102549475\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.8169934640522876,\n \"acc_stderr\": 0.01564306991127334,\n \"acc_norm\": 0.8169934640522876,\n \"acc_norm_stderr\": 0.01564306991127334\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.7272727272727273,\n \"acc_stderr\": 0.04265792110940589,\n \"acc_norm\": 0.7272727272727273,\n \"acc_norm_stderr\": 0.04265792110940589\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.8489795918367347,\n \"acc_stderr\": 0.022923004094736847,\n \"acc_norm\": 0.8489795918367347,\n \"acc_norm_stderr\": 0.022923004094736847\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.9104477611940298,\n \"acc_stderr\": 0.02019067053502792,\n \"acc_norm\": 0.9104477611940298,\n \"acc_norm_stderr\": 0.02019067053502792\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.92,\n \"acc_stderr\": 0.0272659924344291,\n \"acc_norm\": 0.92,\n \"acc_norm_stderr\": 0.0272659924344291\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5783132530120482,\n \"acc_stderr\": 0.03844453181770917,\n \"acc_norm\": 0.5783132530120482,\n \"acc_norm_stderr\": 0.03844453181770917\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8654970760233918,\n \"acc_stderr\": 0.0261682213446623,\n \"acc_norm\": 0.8654970760233918,\n \"acc_norm_stderr\": 0.0261682213446623\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5483476132190942,\n \"mc1_stderr\": 0.01742148030027764,\n \"mc2\": 0.7100760148764914,\n \"mc2_stderr\": 0.01415531370772675\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8334648776637726,\n \"acc_stderr\": 0.010470796496781093\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.730098559514784,\n \"acc_stderr\": 0.012227442856468896\n }\n}\n```", 
"repo_url": "https://huggingface.co/TomGrc/FusionNet_34Bx2_MoE_v0.1", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_02T06_58_32.819028", "path": ["**/details_harness|arc:challenge|25_2024-02-02T06-58-32.819028.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-02T06-58-32.819028.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_02T06_58_32.819028", "path": ["**/details_harness|gsm8k|5_2024-02-02T06-58-32.819028.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-02T06-58-32.819028.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_02T06_58_32.819028", "path": ["**/details_harness|hellaswag|10_2024-02-02T06-58-32.819028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-02T06-58-32.819028.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_02T06_58_32.819028", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T06-58-32.819028.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T06-58-32.819028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T06-58-32.819028.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T06-58-32.819028.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T06-58-32.819028.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T06-58-32.819028.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_02T06_58_32.819028", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T06-58-32.819028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T06-58-32.819028.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_02T06_58_32.819028", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T06-58-32.819028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T06-58-32.819028.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_02T06_58_32.819028", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T06-58-32.819028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T06-58-32.819028.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_02T06_58_32.819028", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T06-58-32.819028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T06-58-32.819028.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_02T06_58_32.819028", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T06-58-32.819028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T06-58-32.819028.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_02T06_58_32.819028", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T06-58-32.819028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T06-58-32.819028.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_02T06_58_32.819028", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T06-58-32.819028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T06-58-32.819028.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_02_02T06_58_32.819028", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T06-58-32.819028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T06-58-32.819028.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_02T06_58_32.819028", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T06-58-32.819028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T06-58-32.819028.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_02T06_58_32.819028", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T06-58-32.819028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T06-58-32.819028.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_02T06_58_32.819028", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T06-58-32.819028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T06-58-32.819028.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_02T06_58_32.819028", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T06-58-32.819028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T06-58-32.819028.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_02T06_58_32.819028", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T06-58-32.819028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T06-58-32.819028.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_02T06_58_32.819028", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T06-58-32.819028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T06-58-32.819028.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_02T06_58_32.819028", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T06-58-32.819028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T06-58-32.819028.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_02T06_58_32.819028", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T06-58-32.819028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T06-58-32.819028.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_02T06_58_32.819028", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T06-58-32.819028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T06-58-32.819028.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_02T06_58_32.819028", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T06-58-32.819028.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T06-58-32.819028.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_02T06_58_32.819028", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T06-58-32.819028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T06-58-32.819028.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_02T06_58_32.819028", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T06-58-32.819028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T06-58-32.819028.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_02T06_58_32.819028", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T06-58-32.819028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T06-58-32.819028.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_02T06_58_32.819028", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T06-58-32.819028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T06-58-32.819028.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_02T06_58_32.819028", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T06-58-32.819028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T06-58-32.819028.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_02T06_58_32.819028", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T06-58-32.819028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T06-58-32.819028.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_02T06_58_32.819028", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T06-58-32.819028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T06-58-32.819028.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_02T06_58_32.819028", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T06-58-32.819028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T06-58-32.819028.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_02T06_58_32.819028", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T06-58-32.819028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T06-58-32.819028.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_02T06_58_32.819028", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T06-58-32.819028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T06-58-32.819028.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_02T06_58_32.819028", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T06-58-32.819028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T06-58-32.819028.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_02T06_58_32.819028", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T06-58-32.819028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T06-58-32.819028.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_02T06_58_32.819028", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T06-58-32.819028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T06-58-32.819028.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_02T06_58_32.819028", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T06-58-32.819028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T06-58-32.819028.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_02T06_58_32.819028", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T06-58-32.819028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T06-58-32.819028.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_02T06_58_32.819028", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T06-58-32.819028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T06-58-32.819028.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_02T06_58_32.819028", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T06-58-32.819028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T06-58-32.819028.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_02T06_58_32.819028", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T06-58-32.819028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T06-58-32.819028.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_02T06_58_32.819028", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T06-58-32.819028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T06-58-32.819028.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_02_02T06_58_32.819028", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T06-58-32.819028.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T06-58-32.819028.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_02T06_58_32.819028", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T06-58-32.819028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T06-58-32.819028.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_02T06_58_32.819028", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T06-58-32.819028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T06-58-32.819028.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_02T06_58_32.819028", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T06-58-32.819028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T06-58-32.819028.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_02T06_58_32.819028", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T06-58-32.819028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T06-58-32.819028.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_02T06_58_32.819028", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T06-58-32.819028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T06-58-32.819028.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_02T06_58_32.819028", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T06-58-32.819028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T06-58-32.819028.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_02T06_58_32.819028", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T06-58-32.819028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T06-58-32.819028.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_02T06_58_32.819028", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T06-58-32.819028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T06-58-32.819028.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_02T06_58_32.819028", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T06-58-32.819028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T06-58-32.819028.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_02T06_58_32.819028", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T06-58-32.819028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T06-58-32.819028.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_02T06_58_32.819028", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T06-58-32.819028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T06-58-32.819028.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_02T06_58_32.819028", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T06-58-32.819028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T06-58-32.819028.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_02T06_58_32.819028", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T06-58-32.819028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T06-58-32.819028.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_02T06_58_32.819028", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T06-58-32.819028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T06-58-32.819028.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_02T06_58_32.819028", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T06-58-32.819028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T06-58-32.819028.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_02T06_58_32.819028", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T06-58-32.819028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T06-58-32.819028.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_02T06_58_32.819028", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T06-58-32.819028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T06-58-32.819028.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_02T06_58_32.819028", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T06-58-32.819028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T06-58-32.819028.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_02T06_58_32.819028", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T06-58-32.819028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T06-58-32.819028.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_02T06_58_32.819028", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T06-58-32.819028.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T06-58-32.819028.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_02T06_58_32.819028", "path": ["**/details_harness|winogrande|5_2024-02-02T06-58-32.819028.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-02-02T06-58-32.819028.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_02_02T06_58_32.819028", "path": ["results_2024-02-02T06-58-32.819028.parquet"]}, {"split": "latest", "path": ["results_2024-02-02T06-58-32.819028.parquet"]}]}]}
2024-02-02T07:01:12+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of TomGrc/FusionNet_34Bx2_MoE_v0.1 Dataset automatically created during the evaluation run of model TomGrc/FusionNet_34Bx2_MoE_v0.1 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-02-02T06:58:32.819028 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
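The plain-text rendering above elides the card's code block. Here it is reconstructed from this record's metadata, and extended with a hedged sketch of reading the aggregated "results" configuration (both config names appear verbatim in the metadata above; only the `datasets` library is assumed):

```python
from datasets import load_dataset

# Per-task details, as in the card's elided example.
data = load_dataset(
    "open-llm-leaderboard/details_TomGrc__FusionNet_34Bx2_MoE_v0.1",
    "harness_winogrande_5",
    split="train",
)

# The aggregated metrics for a run live in the "results" configuration.
results = load_dataset(
    "open-llm-leaderboard/details_TomGrc__FusionNet_34Bx2_MoE_v0.1",
    "results",
    split="latest",
)
df = results.to_pandas()  # optional: inspect the aggregated scores as a DataFrame
```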
[ "# Dataset Card for Evaluation run of TomGrc/FusionNet_34Bx2_MoE_v0.1\n\n\n\nDataset automatically created during the evaluation run of model TomGrc/FusionNet_34Bx2_MoE_v0.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T06:58:32.819028(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of TomGrc/FusionNet_34Bx2_MoE_v0.1\n\n\n\nDataset automatically created during the evaluation run of model TomGrc/FusionNet_34Bx2_MoE_v0.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T06:58:32.819028(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
d6a2fb2f8bbaab1443061246a202e38aba8db0d4
# Dataset Card for Evaluation run of JaeyeonKang/CCK_Gony_v0.1 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [JaeyeonKang/CCK_Gony_v0.1](https://huggingface.co/JaeyeonKang/CCK_Gony_v0.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_JaeyeonKang__CCK_Gony_v0.1", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-02-02T07:12:22.322371](https://huggingface.co/datasets/open-llm-leaderboard/details_JaeyeonKang__CCK_Gony_v0.1/blob/main/results_2024-02-02T07-12-22.322371.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.7108483757620286, "acc_stderr": 0.030286675390153756, "acc_norm": 0.714628019750925, "acc_norm_stderr": 0.030868556998739066, "mc1": 0.48714810281517745, "mc1_stderr": 0.017497717944299825, "mc2": 0.6323461536157141, "mc2_stderr": 0.015117307818448413 }, "harness|arc:challenge|25": { "acc": 0.6689419795221843, "acc_stderr": 0.013752062419817832, "acc_norm": 0.7005119453924915, "acc_norm_stderr": 0.013385021637313572 }, "harness|hellaswag|10": { "acc": 0.6816371240788688, "acc_stderr": 0.004648890787581701, "acc_norm": 0.8727345150368453, "acc_norm_stderr": 0.0033258902255298615 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6814814814814815, "acc_stderr": 0.04024778401977109, "acc_norm": 0.6814814814814815, "acc_norm_stderr": 0.04024778401977109 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.7828947368421053, "acc_stderr": 0.03355045304882924, "acc_norm": 0.7828947368421053, "acc_norm_stderr": 0.03355045304882924 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.72, "acc_stderr": 0.04512608598542127, "acc_norm": 0.72, "acc_norm_stderr": 0.04512608598542127 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7886792452830189, "acc_stderr": 0.025125766484827845, "acc_norm": 0.7886792452830189, "acc_norm_stderr": 0.025125766484827845 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.8194444444444444, "acc_stderr": 0.032166008088022675, "acc_norm": 0.8194444444444444, "acc_norm_stderr": 0.032166008088022675 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.5, "acc_stderr": 0.050251890762960605, "acc_norm": 0.5, "acc_norm_stderr": 0.050251890762960605 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.65, "acc_stderr": 0.04793724854411019, "acc_norm": 0.65, "acc_norm_stderr": 0.04793724854411019 },
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.47, "acc_stderr": 0.050161355804659205, "acc_norm": 0.47, "acc_norm_stderr": 0.050161355804659205 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.7572254335260116, "acc_stderr": 0.0326926380614177, "acc_norm": 0.7572254335260116, "acc_norm_stderr": 0.0326926380614177 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.43137254901960786, "acc_stderr": 0.04928099597287534, "acc_norm": 0.43137254901960786, "acc_norm_stderr": 0.04928099597287534 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.79, "acc_stderr": 0.040936018074033256, "acc_norm": 0.79, "acc_norm_stderr": 0.040936018074033256 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.6638297872340425, "acc_stderr": 0.030881618520676942, "acc_norm": 0.6638297872340425, "acc_norm_stderr": 0.030881618520676942 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5964912280701754, "acc_stderr": 0.04615186962583707, "acc_norm": 0.5964912280701754, "acc_norm_stderr": 0.04615186962583707 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.6620689655172414, "acc_stderr": 0.039417076320648906, "acc_norm": 0.6620689655172414, "acc_norm_stderr": 0.039417076320648906 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.49206349206349204, "acc_stderr": 0.025748065871673286, "acc_norm": 0.49206349206349204, "acc_norm_stderr": 0.025748065871673286 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.5158730158730159, "acc_stderr": 0.044698818540726076, "acc_norm": 0.5158730158730159, "acc_norm_stderr": 0.044698818540726076 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.8483870967741935, "acc_stderr": 0.02040261665441676, "acc_norm": 0.8483870967741935, "acc_norm_stderr": 0.02040261665441676 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.625615763546798, "acc_stderr": 0.03405155380561952, "acc_norm": 0.625615763546798, "acc_norm_stderr": 0.03405155380561952 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.77, "acc_stderr": 0.04229525846816508, "acc_norm": 0.77, "acc_norm_stderr": 0.04229525846816508 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7878787878787878, "acc_stderr": 0.031922715695483, "acc_norm": 0.7878787878787878, "acc_norm_stderr": 0.031922715695483 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8686868686868687, "acc_stderr": 0.024063156416822523, "acc_norm": 0.8686868686868687, "acc_norm_stderr": 0.024063156416822523 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9585492227979274, "acc_stderr": 0.014385432857476461, "acc_norm": 0.9585492227979274, "acc_norm_stderr": 0.014385432857476461 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.7051282051282052, "acc_stderr": 0.023119362758232294, "acc_norm": 0.7051282051282052, "acc_norm_stderr": 0.023119362758232294 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3814814814814815, "acc_stderr": 0.02961671892749759, "acc_norm": 0.3814814814814815, "acc_norm_stderr": 0.02961671892749759 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.8025210084033614, "acc_stderr": 0.025859164122051453, "acc_norm": 0.8025210084033614, "acc_norm_stderr": 0.025859164122051453 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.47019867549668876, "acc_stderr": 
0.040752249922169775, "acc_norm": 0.47019867549668876, "acc_norm_stderr": 0.040752249922169775 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8807339449541285, "acc_stderr": 0.01389572929258896, "acc_norm": 0.8807339449541285, "acc_norm_stderr": 0.01389572929258896 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5879629629629629, "acc_stderr": 0.03356787758160831, "acc_norm": 0.5879629629629629, "acc_norm_stderr": 0.03356787758160831 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8480392156862745, "acc_stderr": 0.025195658428931792, "acc_norm": 0.8480392156862745, "acc_norm_stderr": 0.025195658428931792 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8607594936708861, "acc_stderr": 0.02253552635269271, "acc_norm": 0.8607594936708861, "acc_norm_stderr": 0.02253552635269271 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.7399103139013453, "acc_stderr": 0.029442495585857476, "acc_norm": 0.7399103139013453, "acc_norm_stderr": 0.029442495585857476 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.8015267175572519, "acc_stderr": 0.03498149385462469, "acc_norm": 0.8015267175572519, "acc_norm_stderr": 0.03498149385462469 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8760330578512396, "acc_stderr": 0.030083098716035202, "acc_norm": 0.8760330578512396, "acc_norm_stderr": 0.030083098716035202 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8148148148148148, "acc_stderr": 0.03755265865037182, "acc_norm": 0.8148148148148148, "acc_norm_stderr": 0.03755265865037182 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.8159509202453987, "acc_stderr": 0.030446777687971716, "acc_norm": 0.8159509202453987, "acc_norm_stderr": 0.030446777687971716 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.6339285714285714, "acc_stderr": 0.04572372358737431, "acc_norm": 0.6339285714285714, "acc_norm_stderr": 0.04572372358737431 }, "harness|hendrycksTest-management|5": { "acc": 0.8349514563106796, "acc_stderr": 0.036756688322331886, "acc_norm": 0.8349514563106796, "acc_norm_stderr": 0.036756688322331886 }, "harness|hendrycksTest-marketing|5": { "acc": 0.9273504273504274, "acc_stderr": 0.017004368568132366, "acc_norm": 0.9273504273504274, "acc_norm_stderr": 0.017004368568132366 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.79, "acc_stderr": 0.040936018074033256, "acc_norm": 0.79, "acc_norm_stderr": 0.040936018074033256 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8825031928480205, "acc_stderr": 0.011515102251977214, "acc_norm": 0.8825031928480205, "acc_norm_stderr": 0.011515102251977214 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7774566473988439, "acc_stderr": 0.02239421566194282, "acc_norm": 0.7774566473988439, "acc_norm_stderr": 0.02239421566194282 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.46256983240223465, "acc_stderr": 0.016675578687308085, "acc_norm": 0.46256983240223465, "acc_norm_stderr": 0.016675578687308085 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.8333333333333334, "acc_stderr": 0.021339479988816027, "acc_norm": 0.8333333333333334, "acc_norm_stderr": 0.021339479988816027 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7877813504823151, "acc_stderr": 0.023222756797435094, "acc_norm": 0.7877813504823151, "acc_norm_stderr": 0.023222756797435094 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.8179012345679012, "acc_stderr": 0.02147349183480835, "acc_norm": 0.8179012345679012, "acc_norm_stderr": 0.02147349183480835 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.5354609929078015, "acc_stderr": 0.029752389657427054, "acc_norm": 0.5354609929078015, "acc_norm_stderr": 0.029752389657427054 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.5397653194263363, "acc_stderr": 0.012729785386598549, "acc_norm": 0.5397653194263363, "acc_norm_stderr": 0.012729785386598549 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.7830882352941176, "acc_stderr": 0.025035845227711274, "acc_norm": 0.7830882352941176, "acc_norm_stderr": 0.025035845227711274 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.7696078431372549, "acc_stderr": 0.01703522925803404, "acc_norm": 0.7696078431372549, "acc_norm_stderr": 0.01703522925803404 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.7181818181818181, "acc_stderr": 0.043091187099464585, "acc_norm": 0.7181818181818181, "acc_norm_stderr": 0.043091187099464585 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7755102040816326, "acc_stderr": 0.0267114305555384, "acc_norm": 0.7755102040816326, "acc_norm_stderr": 0.0267114305555384 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8855721393034826, "acc_stderr": 0.022509345325101706, "acc_norm": 0.8855721393034826, "acc_norm_stderr": 0.022509345325101706 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.9, "acc_stderr": 0.030151134457776334, "acc_norm": 0.9, "acc_norm_stderr": 0.030151134457776334 }, "harness|hendrycksTest-virology|5": { "acc": 0.5120481927710844, "acc_stderr": 0.038913644958358154, "acc_norm": 0.5120481927710844, "acc_norm_stderr": 0.038913644958358154 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8771929824561403, "acc_stderr": 0.02517298435015577, "acc_norm": 0.8771929824561403, "acc_norm_stderr": 0.02517298435015577 }, "harness|truthfulqa:mc|0": { "mc1": 0.48714810281517745, "mc1_stderr": 0.017497717944299825, "mc2": 0.6323461536157141, "mc2_stderr": 0.015117307818448413 }, "harness|winogrande|5": { "acc": 0.8034727703235991, "acc_stderr": 0.011168120593569574 }, "harness|gsm8k|5": { "acc": 0.6178923426838514, "acc_stderr": 0.013384173935648495 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). -->

#### Data Collection and Processing

<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->

[More Information Needed]

#### Who are the source data producers?

<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->

[More Information Needed]

### Annotations [optional]

<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->

#### Annotation process

<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->

[More Information Needed]

#### Who are the annotators?

<!-- This section describes the people or systems who created the annotations. -->

[More Information Needed]

#### Personal and Sensitive Information

<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->

[More Information Needed]

## Bias, Risks, and Limitations

<!-- This section is meant to convey both technical and sociotechnical limitations. -->

[More Information Needed]

### Recommendations

<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->

Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.

## Citation [optional]

<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->

**BibTeX:**

[More Information Needed]

**APA:**

[More Information Needed]

## Glossary [optional]

<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->

[More Information Needed]

## More Information [optional]

[More Information Needed]

## Dataset Card Authors [optional]

[More Information Needed]

## Dataset Card Contact

[More Information Needed]
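As a complement to the loading snippet near the top of this card, here is a minimal sketch of how one might pull the aggregated metrics rather than a single task's details. It assumes only that the `datasets` library is installed; the `"results"` configuration and `"latest"` split are the ones declared in this card's configuration metadata:

```python
from datasets import load_dataset

# The "results" configuration stores the aggregated scores for each run;
# the "latest" split always points to the most recent evaluation.
results = load_dataset(
    "open-llm-leaderboard/details_JaeyeonKang__CCK_Gony_v0.1",
    "results",
    split="latest",
)

# Inspect the stored metrics (typically a single row per run).
print(results[0])
```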
open-llm-leaderboard/details_JaeyeonKang__CCK_Gony_v0.1
[ "region:us" ]
2024-02-02T07:14:49+00:00
{"pretty_name": "Evaluation run of JaeyeonKang/CCK_Gony_v0.1", "dataset_summary": "Dataset automatically created during the evaluation run of model [JaeyeonKang/CCK_Gony_v0.1](https://huggingface.co/JaeyeonKang/CCK_Gony_v0.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_JaeyeonKang__CCK_Gony_v0.1\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-02T07:12:22.322371](https://huggingface.co/datasets/open-llm-leaderboard/details_JaeyeonKang__CCK_Gony_v0.1/blob/main/results_2024-02-02T07-12-22.322371.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.7108483757620286,\n \"acc_stderr\": 0.030286675390153756,\n \"acc_norm\": 0.714628019750925,\n \"acc_norm_stderr\": 0.030868556998739066,\n \"mc1\": 0.48714810281517745,\n \"mc1_stderr\": 0.017497717944299825,\n \"mc2\": 0.6323461536157141,\n \"mc2_stderr\": 0.015117307818448413\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6689419795221843,\n \"acc_stderr\": 0.013752062419817832,\n \"acc_norm\": 0.7005119453924915,\n \"acc_norm_stderr\": 0.013385021637313572\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6816371240788688,\n \"acc_stderr\": 0.004648890787581701,\n \"acc_norm\": 0.8727345150368453,\n \"acc_norm_stderr\": 0.0033258902255298615\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.36,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6814814814814815,\n \"acc_stderr\": 0.04024778401977109,\n \"acc_norm\": 0.6814814814814815,\n \"acc_norm_stderr\": 0.04024778401977109\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.7828947368421053,\n \"acc_stderr\": 0.03355045304882924,\n \"acc_norm\": 0.7828947368421053,\n \"acc_norm_stderr\": 0.03355045304882924\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.04512608598542127,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.04512608598542127\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7886792452830189,\n \"acc_stderr\": 0.025125766484827845,\n \"acc_norm\": 0.7886792452830189,\n \"acc_norm_stderr\": 0.025125766484827845\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.8194444444444444,\n \"acc_stderr\": 0.032166008088022675,\n \"acc_norm\": 0.8194444444444444,\n \"acc_norm_stderr\": 0.032166008088022675\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 
0.050251890762960605,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.050251890762960605\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.65,\n \"acc_stderr\": 0.04793724854411019,\n \"acc_norm\": 0.65,\n \"acc_norm_stderr\": 0.04793724854411019\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.47,\n \"acc_stderr\": 0.050161355804659205,\n \"acc_norm\": 0.47,\n \"acc_norm_stderr\": 0.050161355804659205\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.7572254335260116,\n \"acc_stderr\": 0.0326926380614177,\n \"acc_norm\": 0.7572254335260116,\n \"acc_norm_stderr\": 0.0326926380614177\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.43137254901960786,\n \"acc_stderr\": 0.04928099597287534,\n \"acc_norm\": 0.43137254901960786,\n \"acc_norm_stderr\": 0.04928099597287534\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.79,\n \"acc_stderr\": 0.040936018074033256,\n \"acc_norm\": 0.79,\n \"acc_norm_stderr\": 0.040936018074033256\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.6638297872340425,\n \"acc_stderr\": 0.030881618520676942,\n \"acc_norm\": 0.6638297872340425,\n \"acc_norm_stderr\": 0.030881618520676942\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5964912280701754,\n \"acc_stderr\": 0.04615186962583707,\n \"acc_norm\": 0.5964912280701754,\n \"acc_norm_stderr\": 0.04615186962583707\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.6620689655172414,\n \"acc_stderr\": 0.039417076320648906,\n \"acc_norm\": 0.6620689655172414,\n \"acc_norm_stderr\": 0.039417076320648906\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.49206349206349204,\n \"acc_stderr\": 0.025748065871673286,\n \"acc_norm\": 0.49206349206349204,\n \"acc_norm_stderr\": 0.025748065871673286\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.5158730158730159,\n \"acc_stderr\": 0.044698818540726076,\n \"acc_norm\": 0.5158730158730159,\n \"acc_norm_stderr\": 0.044698818540726076\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.42,\n \"acc_stderr\": 0.049604496374885836,\n \"acc_norm\": 0.42,\n \"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.8483870967741935,\n \"acc_stderr\": 0.02040261665441676,\n \"acc_norm\": 0.8483870967741935,\n \"acc_norm_stderr\": 0.02040261665441676\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.625615763546798,\n \"acc_stderr\": 0.03405155380561952,\n \"acc_norm\": 0.625615763546798,\n \"acc_norm_stderr\": 0.03405155380561952\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.77,\n \"acc_stderr\": 0.04229525846816508,\n \"acc_norm\": 0.77,\n \"acc_norm_stderr\": 0.04229525846816508\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7878787878787878,\n \"acc_stderr\": 0.031922715695483,\n \"acc_norm\": 0.7878787878787878,\n \"acc_norm_stderr\": 0.031922715695483\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8686868686868687,\n \"acc_stderr\": 0.024063156416822523,\n \"acc_norm\": 0.8686868686868687,\n \"acc_norm_stderr\": 0.024063156416822523\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9585492227979274,\n \"acc_stderr\": 0.014385432857476461,\n \"acc_norm\": 0.9585492227979274,\n \"acc_norm_stderr\": 0.014385432857476461\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.7051282051282052,\n \"acc_stderr\": 0.023119362758232294,\n \"acc_norm\": 0.7051282051282052,\n \"acc_norm_stderr\": 0.023119362758232294\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3814814814814815,\n \"acc_stderr\": 0.02961671892749759,\n \"acc_norm\": 0.3814814814814815,\n \"acc_norm_stderr\": 0.02961671892749759\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.8025210084033614,\n \"acc_stderr\": 0.025859164122051453,\n \"acc_norm\": 0.8025210084033614,\n \"acc_norm_stderr\": 0.025859164122051453\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.47019867549668876,\n \"acc_stderr\": 0.040752249922169775,\n \"acc_norm\": 0.47019867549668876,\n \"acc_norm_stderr\": 0.040752249922169775\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8807339449541285,\n \"acc_stderr\": 0.01389572929258896,\n \"acc_norm\": 0.8807339449541285,\n \"acc_norm_stderr\": 0.01389572929258896\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5879629629629629,\n \"acc_stderr\": 0.03356787758160831,\n \"acc_norm\": 0.5879629629629629,\n \"acc_norm_stderr\": 0.03356787758160831\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8480392156862745,\n \"acc_stderr\": 0.025195658428931792,\n \"acc_norm\": 0.8480392156862745,\n \"acc_norm_stderr\": 0.025195658428931792\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8607594936708861,\n \"acc_stderr\": 0.02253552635269271,\n \"acc_norm\": 0.8607594936708861,\n \"acc_norm_stderr\": 0.02253552635269271\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.7399103139013453,\n \"acc_stderr\": 0.029442495585857476,\n \"acc_norm\": 0.7399103139013453,\n \"acc_norm_stderr\": 0.029442495585857476\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.8015267175572519,\n \"acc_stderr\": 0.03498149385462469,\n \"acc_norm\": 0.8015267175572519,\n \"acc_norm_stderr\": 0.03498149385462469\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8760330578512396,\n \"acc_stderr\": 0.030083098716035202,\n \"acc_norm\": 0.8760330578512396,\n \"acc_norm_stderr\": 0.030083098716035202\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8148148148148148,\n \"acc_stderr\": 0.03755265865037182,\n \"acc_norm\": 0.8148148148148148,\n \"acc_norm_stderr\": 0.03755265865037182\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.8159509202453987,\n \"acc_stderr\": 0.030446777687971716,\n \"acc_norm\": 0.8159509202453987,\n \"acc_norm_stderr\": 0.030446777687971716\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.6339285714285714,\n \"acc_stderr\": 0.04572372358737431,\n \"acc_norm\": 0.6339285714285714,\n \"acc_norm_stderr\": 0.04572372358737431\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8349514563106796,\n \"acc_stderr\": 0.036756688322331886,\n \"acc_norm\": 0.8349514563106796,\n \"acc_norm_stderr\": 0.036756688322331886\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.9273504273504274,\n \"acc_stderr\": 0.017004368568132366,\n \"acc_norm\": 0.9273504273504274,\n \"acc_norm_stderr\": 0.017004368568132366\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.79,\n \"acc_stderr\": 0.040936018074033256,\n \"acc_norm\": 0.79,\n \"acc_norm_stderr\": 0.040936018074033256\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.8825031928480205,\n \"acc_stderr\": 0.011515102251977214,\n \"acc_norm\": 0.8825031928480205,\n \"acc_norm_stderr\": 0.011515102251977214\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7774566473988439,\n \"acc_stderr\": 0.02239421566194282,\n \"acc_norm\": 0.7774566473988439,\n \"acc_norm_stderr\": 0.02239421566194282\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.46256983240223465,\n \"acc_stderr\": 0.016675578687308085,\n \"acc_norm\": 0.46256983240223465,\n \"acc_norm_stderr\": 0.016675578687308085\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.8333333333333334,\n \"acc_stderr\": 0.021339479988816027,\n \"acc_norm\": 0.8333333333333334,\n \"acc_norm_stderr\": 0.021339479988816027\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7877813504823151,\n \"acc_stderr\": 0.023222756797435094,\n \"acc_norm\": 0.7877813504823151,\n \"acc_norm_stderr\": 0.023222756797435094\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.8179012345679012,\n \"acc_stderr\": 0.02147349183480835,\n \"acc_norm\": 0.8179012345679012,\n \"acc_norm_stderr\": 0.02147349183480835\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.5354609929078015,\n \"acc_stderr\": 0.029752389657427054,\n \"acc_norm\": 0.5354609929078015,\n \"acc_norm_stderr\": 0.029752389657427054\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.5397653194263363,\n \"acc_stderr\": 0.012729785386598549,\n \"acc_norm\": 0.5397653194263363,\n \"acc_norm_stderr\": 0.012729785386598549\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.7830882352941176,\n \"acc_stderr\": 0.025035845227711274,\n \"acc_norm\": 0.7830882352941176,\n \"acc_norm_stderr\": 0.025035845227711274\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.7696078431372549,\n \"acc_stderr\": 0.01703522925803404,\n \"acc_norm\": 0.7696078431372549,\n \"acc_norm_stderr\": 0.01703522925803404\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.7181818181818181,\n \"acc_stderr\": 0.043091187099464585,\n \"acc_norm\": 0.7181818181818181,\n \"acc_norm_stderr\": 0.043091187099464585\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7755102040816326,\n \"acc_stderr\": 0.0267114305555384,\n \"acc_norm\": 0.7755102040816326,\n \"acc_norm_stderr\": 0.0267114305555384\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8855721393034826,\n \"acc_stderr\": 0.022509345325101706,\n \"acc_norm\": 0.8855721393034826,\n \"acc_norm_stderr\": 0.022509345325101706\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.9,\n \"acc_stderr\": 0.030151134457776334,\n \"acc_norm\": 0.9,\n \"acc_norm_stderr\": 0.030151134457776334\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5120481927710844,\n \"acc_stderr\": 0.038913644958358154,\n \"acc_norm\": 0.5120481927710844,\n \"acc_norm_stderr\": 0.038913644958358154\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8771929824561403,\n \"acc_stderr\": 0.02517298435015577,\n \"acc_norm\": 0.8771929824561403,\n \"acc_norm_stderr\": 0.02517298435015577\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.48714810281517745,\n \"mc1_stderr\": 0.017497717944299825,\n \"mc2\": 0.6323461536157141,\n \"mc2_stderr\": 0.015117307818448413\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8034727703235991,\n \"acc_stderr\": 0.011168120593569574\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.6178923426838514,\n \"acc_stderr\": 
0.013384173935648495\n }\n}\n```", "repo_url": "https://huggingface.co/JaeyeonKang/CCK_Gony_v0.1", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_02T07_12_22.322371", "path": ["**/details_harness|arc:challenge|25_2024-02-02T07-12-22.322371.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-02T07-12-22.322371.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_02T07_12_22.322371", "path": ["**/details_harness|gsm8k|5_2024-02-02T07-12-22.322371.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-02T07-12-22.322371.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_02T07_12_22.322371", "path": ["**/details_harness|hellaswag|10_2024-02-02T07-12-22.322371.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-02T07-12-22.322371.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_02T07_12_22.322371", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T07-12-22.322371.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T07-12-22.322371.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T07-12-22.322371.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T07-12-22.322371.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T07-12-22.322371.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T07-12-22.322371.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_02T07_12_22.322371", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T07-12-22.322371.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T07-12-22.322371.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_02T07_12_22.322371", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T07-12-22.322371.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T07-12-22.322371.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_02T07_12_22.322371", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T07-12-22.322371.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T07-12-22.322371.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_02T07_12_22.322371", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T07-12-22.322371.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T07-12-22.322371.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_02T07_12_22.322371", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T07-12-22.322371.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T07-12-22.322371.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_02T07_12_22.322371", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T07-12-22.322371.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T07-12-22.322371.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_02T07_12_22.322371", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T07-12-22.322371.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T07-12-22.322371.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_02_02T07_12_22.322371", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T07-12-22.322371.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T07-12-22.322371.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_02T07_12_22.322371", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T07-12-22.322371.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T07-12-22.322371.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_02T07_12_22.322371", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T07-12-22.322371.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T07-12-22.322371.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_02T07_12_22.322371", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T07-12-22.322371.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T07-12-22.322371.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_02T07_12_22.322371", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T07-12-22.322371.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T07-12-22.322371.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_02T07_12_22.322371", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T07-12-22.322371.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T07-12-22.322371.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_02T07_12_22.322371", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T07-12-22.322371.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T07-12-22.322371.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_02T07_12_22.322371", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T07-12-22.322371.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T07-12-22.322371.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_02T07_12_22.322371", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T07-12-22.322371.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T07-12-22.322371.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_02T07_12_22.322371", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T07-12-22.322371.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T07-12-22.322371.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_02T07_12_22.322371", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T07-12-22.322371.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T07-12-22.322371.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_02T07_12_22.322371", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T07-12-22.322371.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T07-12-22.322371.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_02T07_12_22.322371", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T07-12-22.322371.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T07-12-22.322371.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_02T07_12_22.322371", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T07-12-22.322371.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T07-12-22.322371.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_02T07_12_22.322371", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T07-12-22.322371.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T07-12-22.322371.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_02T07_12_22.322371", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T07-12-22.322371.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T07-12-22.322371.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_02T07_12_22.322371", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T07-12-22.322371.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T07-12-22.322371.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_02T07_12_22.322371", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T07-12-22.322371.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T07-12-22.322371.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_02T07_12_22.322371", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T07-12-22.322371.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T07-12-22.322371.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_02T07_12_22.322371", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T07-12-22.322371.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T07-12-22.322371.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_02T07_12_22.322371", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T07-12-22.322371.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T07-12-22.322371.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_02T07_12_22.322371", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T07-12-22.322371.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T07-12-22.322371.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_02T07_12_22.322371", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T07-12-22.322371.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T07-12-22.322371.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_02T07_12_22.322371", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T07-12-22.322371.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T07-12-22.322371.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_02T07_12_22.322371", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T07-12-22.322371.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T07-12-22.322371.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_02T07_12_22.322371", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T07-12-22.322371.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T07-12-22.322371.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_02T07_12_22.322371", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T07-12-22.322371.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T07-12-22.322371.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_02T07_12_22.322371", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T07-12-22.322371.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T07-12-22.322371.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_02T07_12_22.322371", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T07-12-22.322371.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T07-12-22.322371.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_02T07_12_22.322371", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T07-12-22.322371.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T07-12-22.322371.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_02_02T07_12_22.322371", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T07-12-22.322371.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T07-12-22.322371.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_02T07_12_22.322371", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T07-12-22.322371.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T07-12-22.322371.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_02T07_12_22.322371", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T07-12-22.322371.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T07-12-22.322371.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_02T07_12_22.322371", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T07-12-22.322371.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T07-12-22.322371.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_02T07_12_22.322371", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T07-12-22.322371.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T07-12-22.322371.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_02T07_12_22.322371", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T07-12-22.322371.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T07-12-22.322371.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_02T07_12_22.322371", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T07-12-22.322371.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T07-12-22.322371.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_02T07_12_22.322371", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T07-12-22.322371.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T07-12-22.322371.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_02T07_12_22.322371", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T07-12-22.322371.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T07-12-22.322371.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_02T07_12_22.322371", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T07-12-22.322371.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T07-12-22.322371.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_02T07_12_22.322371", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T07-12-22.322371.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T07-12-22.322371.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_02T07_12_22.322371", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T07-12-22.322371.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T07-12-22.322371.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_02T07_12_22.322371", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T07-12-22.322371.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T07-12-22.322371.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_02T07_12_22.322371", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T07-12-22.322371.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T07-12-22.322371.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_02T07_12_22.322371", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T07-12-22.322371.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T07-12-22.322371.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_02T07_12_22.322371", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T07-12-22.322371.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T07-12-22.322371.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_02T07_12_22.322371", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T07-12-22.322371.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T07-12-22.322371.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_02T07_12_22.322371", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T07-12-22.322371.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T07-12-22.322371.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_02T07_12_22.322371", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T07-12-22.322371.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T07-12-22.322371.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_02T07_12_22.322371", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T07-12-22.322371.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T07-12-22.322371.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_02T07_12_22.322371", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T07-12-22.322371.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T07-12-22.322371.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_02T07_12_22.322371", "path": ["**/details_harness|winogrande|5_2024-02-02T07-12-22.322371.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-02-02T07-12-22.322371.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_02_02T07_12_22.322371", "path": ["results_2024-02-02T07-12-22.322371.parquet"]}, {"split": "latest", "path": ["results_2024-02-02T07-12-22.322371.parquet"]}]}]}
2024-02-02T07:15:17+00:00
f8a1fe7457dab4403dc1e6db8783f45dd744495b
This is a README file.
sg69291/data_set_test
[ "task_categories:translation", "size_categories:n<1K", "language:en", "license:mit", "code", "region:us" ]
2024-02-02T07:30:07+00:00
{"language": ["en"], "license": "mit", "size_categories": ["n<1K"], "task_categories": ["translation"], "pretty_name": "default", "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "Train/*.csv"}, {"split": "test", "path": "Test/*.csv"}], "default": true}], "tags": ["code"]}
2024-02-02T10:42:25+00:00
[]
[ "en" ]
TAGS #task_categories-translation #size_categories-n<1K #language-English #license-mit #code #region-us
This is a README file.
[]
[ "TAGS\n#task_categories-translation #size_categories-n<1K #language-English #license-mit #code #region-us \n" ]
76965003142d654658c19b985baace06119c108f
# Dataset Card for Evaluation run of NobodyExistsOnTheInternet/code-llama-70b-python-instruct <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [NobodyExistsOnTheInternet/code-llama-70b-python-instruct](https://huggingface.co/NobodyExistsOnTheInternet/code-llama-70b-python-instruct) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_NobodyExistsOnTheInternet__code-llama-70b-python-instruct", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-02-02T07:32:36.212408](https://huggingface.co/datasets/open-llm-leaderboard/details_NobodyExistsOnTheInternet__code-llama-70b-python-instruct/blob/main/results_2024-02-02T07-32-36.212408.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.23570005087714752, "acc_stderr": 0.03010714142054835, "acc_norm": 0.236436918646833, "acc_norm_stderr": 0.030906275034796246, "mc1": 0.2386780905752754, "mc1_stderr": 0.014922629695456416, "mc2": 0.49255427998260237, "mc2_stderr": 0.016706749580601723 }, "harness|arc:challenge|25": { "acc": 0.23378839590443687, "acc_stderr": 0.012368225378507148, "acc_norm": 0.2960750853242321, "acc_norm_stderr": 0.013340916085246261 }, "harness|hellaswag|10": { "acc": 0.25433180641306513, "acc_stderr": 0.00434594938238237, "acc_norm": 0.2566221868153754, "acc_norm_stderr": 0.004358764596401037 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.22, "acc_stderr": 0.04163331998932268, "acc_norm": 0.22, "acc_norm_stderr": 0.04163331998932268 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.18518518518518517, "acc_stderr": 0.03355677216313142, "acc_norm": 0.18518518518518517, "acc_norm_stderr": 0.03355677216313142 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.2565789473684211, "acc_stderr": 0.03554180368025689, "acc_norm": 0.2565789473684211, "acc_norm_stderr": 0.03554180368025689 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.21509433962264152, "acc_stderr": 0.02528839450289137, "acc_norm": 0.21509433962264152, "acc_norm_stderr": 0.02528839450289137 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.2638888888888889, "acc_stderr": 0.03685651095897532, "acc_norm": 0.2638888888888889, "acc_norm_stderr": 0.03685651095897532 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.2, "acc_stderr": 0.04020151261036845, "acc_norm": 0.2, "acc_norm_stderr": 0.04020151261036845 },
"harness|hendrycksTest-college_computer_science|5": { "acc": 0.26, "acc_stderr": 0.0440844002276808, "acc_norm": 0.26, "acc_norm_stderr": 0.0440844002276808 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.23121387283236994, "acc_stderr": 0.03214737302029468, "acc_norm": 0.23121387283236994, "acc_norm_stderr": 0.03214737302029468 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.21568627450980393, "acc_stderr": 0.04092563958237654, "acc_norm": 0.21568627450980393, "acc_norm_stderr": 0.04092563958237654 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.28, "acc_stderr": 0.045126085985421276, "acc_norm": 0.28, "acc_norm_stderr": 0.045126085985421276 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.26382978723404255, "acc_stderr": 0.028809989854102973, "acc_norm": 0.26382978723404255, "acc_norm_stderr": 0.028809989854102973 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.23684210526315788, "acc_stderr": 0.039994238792813365, "acc_norm": 0.23684210526315788, "acc_norm_stderr": 0.039994238792813365 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.2413793103448276, "acc_stderr": 0.03565998174135302, "acc_norm": 0.2413793103448276, "acc_norm_stderr": 0.03565998174135302 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.20899470899470898, "acc_stderr": 0.02094048156533486, "acc_norm": 0.20899470899470898, "acc_norm_stderr": 0.02094048156533486 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.2777777777777778, "acc_stderr": 0.04006168083848875, "acc_norm": 0.2777777777777778, "acc_norm_stderr": 0.04006168083848875 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.18, "acc_stderr": 0.038612291966536934, "acc_norm": 0.18, "acc_norm_stderr": 0.038612291966536934 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.1774193548387097, "acc_stderr": 0.02173254068932927, "acc_norm": 0.1774193548387097, "acc_norm_stderr": 0.02173254068932927 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.15270935960591134, "acc_stderr": 0.02530890453938063, "acc_norm": 0.15270935960591134, "acc_norm_stderr": 0.02530890453938063 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.2545454545454545, "acc_stderr": 0.034015067152490405, "acc_norm": 0.2545454545454545, "acc_norm_stderr": 0.034015067152490405 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.17676767676767677, "acc_stderr": 0.027178752639044915, "acc_norm": 0.17676767676767677, "acc_norm_stderr": 0.027178752639044915 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.19689119170984457, "acc_stderr": 0.028697873971860664, "acc_norm": 0.19689119170984457, "acc_norm_stderr": 0.028697873971860664 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.20256410256410257, "acc_stderr": 0.020377660970371372, "acc_norm": 0.20256410256410257, "acc_norm_stderr": 0.020377660970371372 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.2111111111111111, "acc_stderr": 0.024882116857655075, "acc_norm": 0.2111111111111111, "acc_norm_stderr": 0.024882116857655075 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.21008403361344538, "acc_stderr": 
0.026461398717471874, "acc_norm": 0.21008403361344538, "acc_norm_stderr": 0.026461398717471874 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.2052980132450331, "acc_stderr": 0.03297986648473836, "acc_norm": 0.2052980132450331, "acc_norm_stderr": 0.03297986648473836 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.1926605504587156, "acc_stderr": 0.016909276884936094, "acc_norm": 0.1926605504587156, "acc_norm_stderr": 0.016909276884936094 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.18055555555555555, "acc_stderr": 0.02623287897149166, "acc_norm": 0.18055555555555555, "acc_norm_stderr": 0.02623287897149166 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.25980392156862747, "acc_stderr": 0.030778554678693264, "acc_norm": 0.25980392156862747, "acc_norm_stderr": 0.030778554678693264 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.26582278481012656, "acc_stderr": 0.028756799629658335, "acc_norm": 0.26582278481012656, "acc_norm_stderr": 0.028756799629658335 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.31390134529147984, "acc_stderr": 0.031146796482972465, "acc_norm": 0.31390134529147984, "acc_norm_stderr": 0.031146796482972465 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.2595419847328244, "acc_stderr": 0.03844876139785271, "acc_norm": 0.2595419847328244, "acc_norm_stderr": 0.03844876139785271 }, "harness|hendrycksTest-international_law|5": { "acc": 0.24793388429752067, "acc_stderr": 0.039418975265163025, "acc_norm": 0.24793388429752067, "acc_norm_stderr": 0.039418975265163025 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.25925925925925924, "acc_stderr": 0.042365112580946336, "acc_norm": 0.25925925925925924, "acc_norm_stderr": 0.042365112580946336 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.22085889570552147, "acc_stderr": 0.032591773927421776, "acc_norm": 0.22085889570552147, "acc_norm_stderr": 0.032591773927421776 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.21428571428571427, "acc_stderr": 0.03894641120044792, "acc_norm": 0.21428571428571427, "acc_norm_stderr": 0.03894641120044792 }, "harness|hendrycksTest-management|5": { "acc": 0.17475728155339806, "acc_stderr": 0.037601780060266224, "acc_norm": 0.17475728155339806, "acc_norm_stderr": 0.037601780060266224 }, "harness|hendrycksTest-marketing|5": { "acc": 0.2905982905982906, "acc_stderr": 0.02974504857267404, "acc_norm": 0.2905982905982906, "acc_norm_stderr": 0.02974504857267404 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.23754789272030652, "acc_stderr": 0.015218733046150193, "acc_norm": 0.23754789272030652, "acc_norm_stderr": 0.015218733046150193 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.24855491329479767, "acc_stderr": 0.023267528432100174, "acc_norm": 0.24855491329479767, "acc_norm_stderr": 0.023267528432100174 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.23798882681564246, "acc_stderr": 0.014242630070574915, "acc_norm": 0.23798882681564246, "acc_norm_stderr": 0.014242630070574915 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.22549019607843138, "acc_stderr": 0.023929155517351294, "acc_norm": 0.22549019607843138, "acc_norm_stderr": 0.023929155517351294 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.1864951768488746, "acc_stderr": 0.02212243977248077, "acc_norm": 0.1864951768488746, "acc_norm_stderr": 
0.02212243977248077 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.21604938271604937, "acc_stderr": 0.022899162918445806, "acc_norm": 0.21604938271604937, "acc_norm_stderr": 0.022899162918445806 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.2375886524822695, "acc_stderr": 0.025389512552729906, "acc_norm": 0.2375886524822695, "acc_norm_stderr": 0.025389512552729906 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.27053455019556716, "acc_stderr": 0.011345996743539264, "acc_norm": 0.27053455019556716, "acc_norm_stderr": 0.011345996743539264 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.16544117647058823, "acc_stderr": 0.022571771025494767, "acc_norm": 0.16544117647058823, "acc_norm_stderr": 0.022571771025494767 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.25, "acc_stderr": 0.01751781884501444, "acc_norm": 0.25, "acc_norm_stderr": 0.01751781884501444 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.21818181818181817, "acc_stderr": 0.03955932861795833, "acc_norm": 0.21818181818181817, "acc_norm_stderr": 0.03955932861795833 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.24081632653061225, "acc_stderr": 0.027372942201788163, "acc_norm": 0.24081632653061225, "acc_norm_stderr": 0.027372942201788163 }, "harness|hendrycksTest-sociology|5": { "acc": 0.24378109452736318, "acc_stderr": 0.03036049015401465, "acc_norm": 0.24378109452736318, "acc_norm_stderr": 0.03036049015401465 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.28, "acc_stderr": 0.04512608598542128, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542128 }, "harness|hendrycksTest-virology|5": { "acc": 0.28313253012048195, "acc_stderr": 0.03507295431370518, "acc_norm": 0.28313253012048195, "acc_norm_stderr": 0.03507295431370518 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.3216374269005848, "acc_stderr": 0.03582529442573122, "acc_norm": 0.3216374269005848, "acc_norm_stderr": 0.03582529442573122 }, "harness|truthfulqa:mc|0": { "mc1": 0.2386780905752754, "mc1_stderr": 0.014922629695456416, "mc2": 0.49255427998260237, "mc2_stderr": 0.016706749580601723 }, "harness|winogrande|5": { "acc": 0.4925019731649566, "acc_stderr": 0.014050905521228577 }, "harness|gsm8k|5": { "acc": 0.0, "acc_stderr": 0.0 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. 
--> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
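Besides the per-task snippet embedded in the card, the aggregated "results" configuration it describes can be pulled the same way. A minimal sketch using the config and split names that appear in this dataset's own metadata (the "latest" split always aliases the most recent timestamped run):

```python
from datasets import load_dataset

# "results" holds the aggregated metrics; "latest" points at the most
# recent run, per the split layout described in the card.
results = load_dataset(
    "open-llm-leaderboard/details_NobodyExistsOnTheInternet__code-llama-70b-python-instruct",
    "results",
    split="latest",
)
```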
open-llm-leaderboard/details_NobodyExistsOnTheInternet__code-llama-70b-python-instruct
[ "region:us" ]
2024-02-02T07:34:58+00:00
{"pretty_name": "Evaluation run of NobodyExistsOnTheInternet/code-llama-70b-python-instruct", "dataset_summary": "Dataset automatically created during the evaluation run of model [NobodyExistsOnTheInternet/code-llama-70b-python-instruct](https://huggingface.co/NobodyExistsOnTheInternet/code-llama-70b-python-instruct) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_NobodyExistsOnTheInternet__code-llama-70b-python-instruct\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-02T07:32:36.212408](https://huggingface.co/datasets/open-llm-leaderboard/details_NobodyExistsOnTheInternet__code-llama-70b-python-instruct/blob/main/results_2024-02-02T07-32-36.212408.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.23570005087714752,\n \"acc_stderr\": 0.03010714142054835,\n \"acc_norm\": 0.236436918646833,\n \"acc_norm_stderr\": 0.030906275034796246,\n \"mc1\": 0.2386780905752754,\n \"mc1_stderr\": 0.014922629695456416,\n \"mc2\": 0.49255427998260237,\n \"mc2_stderr\": 0.016706749580601723\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.23378839590443687,\n \"acc_stderr\": 0.012368225378507148,\n \"acc_norm\": 0.2960750853242321,\n \"acc_norm_stderr\": 0.013340916085246261\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.25433180641306513,\n \"acc_stderr\": 0.00434594938238237,\n \"acc_norm\": 0.2566221868153754,\n \"acc_norm_stderr\": 0.004358764596401037\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.22,\n \"acc_stderr\": 0.04163331998932268,\n \"acc_norm\": 0.22,\n \"acc_norm_stderr\": 0.04163331998932268\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.18518518518518517,\n \"acc_stderr\": 0.03355677216313142,\n \"acc_norm\": 0.18518518518518517,\n \"acc_norm_stderr\": 0.03355677216313142\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.2565789473684211,\n \"acc_stderr\": 0.03554180368025689,\n \"acc_norm\": 0.2565789473684211,\n \"acc_norm_stderr\": 0.03554180368025689\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.21509433962264152,\n \"acc_stderr\": 0.02528839450289137,\n \"acc_norm\": 0.21509433962264152,\n \"acc_norm_stderr\": 0.02528839450289137\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.2638888888888889,\n \"acc_stderr\": 0.03685651095897532,\n \"acc_norm\": 
0.2638888888888889,\n \"acc_norm_stderr\": 0.03685651095897532\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.2,\n \"acc_stderr\": 0.04020151261036845,\n \"acc_norm\": 0.2,\n \"acc_norm_stderr\": 0.04020151261036845\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.26,\n \"acc_stderr\": 0.0440844002276808,\n \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.0440844002276808\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.21,\n \"acc_stderr\": 0.040936018074033256,\n \"acc_norm\": 0.21,\n \"acc_norm_stderr\": 0.040936018074033256\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.23121387283236994,\n \"acc_stderr\": 0.03214737302029468,\n \"acc_norm\": 0.23121387283236994,\n \"acc_norm_stderr\": 0.03214737302029468\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.21568627450980393,\n \"acc_stderr\": 0.04092563958237654,\n \"acc_norm\": 0.21568627450980393,\n \"acc_norm_stderr\": 0.04092563958237654\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.045126085985421276,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.045126085985421276\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.26382978723404255,\n \"acc_stderr\": 0.028809989854102973,\n \"acc_norm\": 0.26382978723404255,\n \"acc_norm_stderr\": 0.028809989854102973\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.23684210526315788,\n \"acc_stderr\": 0.039994238792813365,\n \"acc_norm\": 0.23684210526315788,\n \"acc_norm_stderr\": 0.039994238792813365\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.2413793103448276,\n \"acc_stderr\": 0.03565998174135302,\n \"acc_norm\": 0.2413793103448276,\n \"acc_norm_stderr\": 0.03565998174135302\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.20899470899470898,\n \"acc_stderr\": 0.02094048156533486,\n \"acc_norm\": 0.20899470899470898,\n \"acc_norm_stderr\": 0.02094048156533486\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.2777777777777778,\n \"acc_stderr\": 0.04006168083848875,\n \"acc_norm\": 0.2777777777777778,\n \"acc_norm_stderr\": 0.04006168083848875\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.18,\n \"acc_stderr\": 0.038612291966536934,\n \"acc_norm\": 0.18,\n \"acc_norm_stderr\": 0.038612291966536934\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.1774193548387097,\n \"acc_stderr\": 0.02173254068932927,\n \"acc_norm\": 0.1774193548387097,\n \"acc_norm_stderr\": 0.02173254068932927\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.15270935960591134,\n \"acc_stderr\": 0.02530890453938063,\n \"acc_norm\": 0.15270935960591134,\n \"acc_norm_stderr\": 0.02530890453938063\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.046882617226215034,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.046882617226215034\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.2545454545454545,\n \"acc_stderr\": 0.034015067152490405,\n \"acc_norm\": 0.2545454545454545,\n \"acc_norm_stderr\": 0.034015067152490405\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.17676767676767677,\n \"acc_stderr\": 0.027178752639044915,\n \"acc_norm\": 0.17676767676767677,\n \"acc_norm_stderr\": 0.027178752639044915\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.19689119170984457,\n 
\"acc_stderr\": 0.028697873971860664,\n \"acc_norm\": 0.19689119170984457,\n \"acc_norm_stderr\": 0.028697873971860664\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.20256410256410257,\n \"acc_stderr\": 0.020377660970371372,\n \"acc_norm\": 0.20256410256410257,\n \"acc_norm_stderr\": 0.020377660970371372\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.2111111111111111,\n \"acc_stderr\": 0.024882116857655075,\n \"acc_norm\": 0.2111111111111111,\n \"acc_norm_stderr\": 0.024882116857655075\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.21008403361344538,\n \"acc_stderr\": 0.026461398717471874,\n \"acc_norm\": 0.21008403361344538,\n \"acc_norm_stderr\": 0.026461398717471874\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.2052980132450331,\n \"acc_stderr\": 0.03297986648473836,\n \"acc_norm\": 0.2052980132450331,\n \"acc_norm_stderr\": 0.03297986648473836\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.1926605504587156,\n \"acc_stderr\": 0.016909276884936094,\n \"acc_norm\": 0.1926605504587156,\n \"acc_norm_stderr\": 0.016909276884936094\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.18055555555555555,\n \"acc_stderr\": 0.02623287897149166,\n \"acc_norm\": 0.18055555555555555,\n \"acc_norm_stderr\": 0.02623287897149166\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.25980392156862747,\n \"acc_stderr\": 0.030778554678693264,\n \"acc_norm\": 0.25980392156862747,\n \"acc_norm_stderr\": 0.030778554678693264\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.26582278481012656,\n \"acc_stderr\": 0.028756799629658335,\n \"acc_norm\": 0.26582278481012656,\n \"acc_norm_stderr\": 0.028756799629658335\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.31390134529147984,\n \"acc_stderr\": 0.031146796482972465,\n \"acc_norm\": 0.31390134529147984,\n \"acc_norm_stderr\": 0.031146796482972465\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.2595419847328244,\n \"acc_stderr\": 0.03844876139785271,\n \"acc_norm\": 0.2595419847328244,\n \"acc_norm_stderr\": 0.03844876139785271\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.24793388429752067,\n \"acc_stderr\": 0.039418975265163025,\n \"acc_norm\": 0.24793388429752067,\n \"acc_norm_stderr\": 0.039418975265163025\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.25925925925925924,\n \"acc_stderr\": 0.042365112580946336,\n \"acc_norm\": 0.25925925925925924,\n \"acc_norm_stderr\": 0.042365112580946336\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.22085889570552147,\n \"acc_stderr\": 0.032591773927421776,\n \"acc_norm\": 0.22085889570552147,\n \"acc_norm_stderr\": 0.032591773927421776\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.21428571428571427,\n \"acc_stderr\": 0.03894641120044792,\n \"acc_norm\": 0.21428571428571427,\n \"acc_norm_stderr\": 0.03894641120044792\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.17475728155339806,\n \"acc_stderr\": 0.037601780060266224,\n \"acc_norm\": 0.17475728155339806,\n \"acc_norm_stderr\": 0.037601780060266224\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.2905982905982906,\n \"acc_stderr\": 0.02974504857267404,\n \"acc_norm\": 0.2905982905982906,\n \"acc_norm_stderr\": 0.02974504857267404\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 
0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.23754789272030652,\n \"acc_stderr\": 0.015218733046150193,\n \"acc_norm\": 0.23754789272030652,\n \"acc_norm_stderr\": 0.015218733046150193\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.24855491329479767,\n \"acc_stderr\": 0.023267528432100174,\n \"acc_norm\": 0.24855491329479767,\n \"acc_norm_stderr\": 0.023267528432100174\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.23798882681564246,\n \"acc_stderr\": 0.014242630070574915,\n \"acc_norm\": 0.23798882681564246,\n \"acc_norm_stderr\": 0.014242630070574915\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.22549019607843138,\n \"acc_stderr\": 0.023929155517351294,\n \"acc_norm\": 0.22549019607843138,\n \"acc_norm_stderr\": 0.023929155517351294\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.1864951768488746,\n \"acc_stderr\": 0.02212243977248077,\n \"acc_norm\": 0.1864951768488746,\n \"acc_norm_stderr\": 0.02212243977248077\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.21604938271604937,\n \"acc_stderr\": 0.022899162918445806,\n \"acc_norm\": 0.21604938271604937,\n \"acc_norm_stderr\": 0.022899162918445806\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.2375886524822695,\n \"acc_stderr\": 0.025389512552729906,\n \"acc_norm\": 0.2375886524822695,\n \"acc_norm_stderr\": 0.025389512552729906\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.27053455019556716,\n \"acc_stderr\": 0.011345996743539264,\n \"acc_norm\": 0.27053455019556716,\n \"acc_norm_stderr\": 0.011345996743539264\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.16544117647058823,\n \"acc_stderr\": 0.022571771025494767,\n \"acc_norm\": 0.16544117647058823,\n \"acc_norm_stderr\": 0.022571771025494767\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.01751781884501444,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.01751781884501444\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.21818181818181817,\n \"acc_stderr\": 0.03955932861795833,\n \"acc_norm\": 0.21818181818181817,\n \"acc_norm_stderr\": 0.03955932861795833\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.24081632653061225,\n \"acc_stderr\": 0.027372942201788163,\n \"acc_norm\": 0.24081632653061225,\n \"acc_norm_stderr\": 0.027372942201788163\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.24378109452736318,\n \"acc_stderr\": 0.03036049015401465,\n \"acc_norm\": 0.24378109452736318,\n \"acc_norm_stderr\": 0.03036049015401465\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542128,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542128\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.28313253012048195,\n \"acc_stderr\": 0.03507295431370518,\n \"acc_norm\": 0.28313253012048195,\n \"acc_norm_stderr\": 0.03507295431370518\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.3216374269005848,\n \"acc_stderr\": 0.03582529442573122,\n \"acc_norm\": 0.3216374269005848,\n \"acc_norm_stderr\": 0.03582529442573122\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.2386780905752754,\n \"mc1_stderr\": 0.014922629695456416,\n \"mc2\": 0.49255427998260237,\n \"mc2_stderr\": 0.016706749580601723\n },\n \"harness|winogrande|5\": {\n \"acc\": 
0.4925019731649566,\n \"acc_stderr\": 0.014050905521228577\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0,\n \"acc_stderr\": 0.0\n }\n}\n```", "repo_url": "https://huggingface.co/NobodyExistsOnTheInternet/code-llama-70b-python-instruct", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_02T07_32_36.212408", "path": ["**/details_harness|arc:challenge|25_2024-02-02T07-32-36.212408.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-02T07-32-36.212408.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_02T07_32_36.212408", "path": ["**/details_harness|gsm8k|5_2024-02-02T07-32-36.212408.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-02T07-32-36.212408.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_02T07_32_36.212408", "path": ["**/details_harness|hellaswag|10_2024-02-02T07-32-36.212408.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-02T07-32-36.212408.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_02T07_32_36.212408", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T07-32-36.212408.parquet", 
"**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T07-32-36.212408.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T07-32-36.212408.parquet", 
"**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T07-32-36.212408.parquet", 
"**/details_harness|hendrycksTest-nutrition|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T07-32-36.212408.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T07-32-36.212408.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_02T07_32_36.212408", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T07-32-36.212408.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T07-32-36.212408.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_02T07_32_36.212408", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T07-32-36.212408.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T07-32-36.212408.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_02T07_32_36.212408", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T07-32-36.212408.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T07-32-36.212408.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_02T07_32_36.212408", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T07-32-36.212408.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T07-32-36.212408.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_02T07_32_36.212408", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T07-32-36.212408.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T07-32-36.212408.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_02T07_32_36.212408", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T07-32-36.212408.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T07-32-36.212408.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_02T07_32_36.212408", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T07-32-36.212408.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T07-32-36.212408.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", 
"data_files": [{"split": "2024_02_02T07_32_36.212408", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T07-32-36.212408.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T07-32-36.212408.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_02T07_32_36.212408", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T07-32-36.212408.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T07-32-36.212408.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_02T07_32_36.212408", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T07-32-36.212408.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T07-32-36.212408.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_02T07_32_36.212408", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T07-32-36.212408.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T07-32-36.212408.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_02T07_32_36.212408", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T07-32-36.212408.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T07-32-36.212408.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_02T07_32_36.212408", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T07-32-36.212408.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T07-32-36.212408.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_02T07_32_36.212408", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T07-32-36.212408.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T07-32-36.212408.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_02T07_32_36.212408", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T07-32-36.212408.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T07-32-36.212408.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_02T07_32_36.212408", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T07-32-36.212408.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T07-32-36.212408.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_02T07_32_36.212408", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T07-32-36.212408.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T07-32-36.212408.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_02T07_32_36.212408", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T07-32-36.212408.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T07-32-36.212408.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_02T07_32_36.212408", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T07-32-36.212408.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T07-32-36.212408.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_02T07_32_36.212408", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T07-32-36.212408.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T07-32-36.212408.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_02T07_32_36.212408", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T07-32-36.212408.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T07-32-36.212408.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_02T07_32_36.212408", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T07-32-36.212408.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T07-32-36.212408.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_02T07_32_36.212408", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T07-32-36.212408.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T07-32-36.212408.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_02T07_32_36.212408", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T07-32-36.212408.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T07-32-36.212408.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_02T07_32_36.212408", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T07-32-36.212408.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T07-32-36.212408.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_02T07_32_36.212408", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T07-32-36.212408.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T07-32-36.212408.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_02T07_32_36.212408", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T07-32-36.212408.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T07-32-36.212408.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_02T07_32_36.212408", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T07-32-36.212408.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T07-32-36.212408.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_02T07_32_36.212408", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T07-32-36.212408.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T07-32-36.212408.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_02T07_32_36.212408", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T07-32-36.212408.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T07-32-36.212408.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_02T07_32_36.212408", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T07-32-36.212408.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T07-32-36.212408.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_02T07_32_36.212408", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T07-32-36.212408.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T07-32-36.212408.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_02T07_32_36.212408", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T07-32-36.212408.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T07-32-36.212408.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_02T07_32_36.212408", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T07-32-36.212408.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T07-32-36.212408.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_02T07_32_36.212408", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T07-32-36.212408.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T07-32-36.212408.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_02T07_32_36.212408", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T07-32-36.212408.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T07-32-36.212408.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_02T07_32_36.212408", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T07-32-36.212408.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T07-32-36.212408.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": 
"2024_02_02T07_32_36.212408", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T07-32-36.212408.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T07-32-36.212408.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_02T07_32_36.212408", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T07-32-36.212408.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T07-32-36.212408.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_02T07_32_36.212408", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T07-32-36.212408.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T07-32-36.212408.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_02T07_32_36.212408", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T07-32-36.212408.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T07-32-36.212408.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_02T07_32_36.212408", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T07-32-36.212408.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T07-32-36.212408.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_02T07_32_36.212408", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T07-32-36.212408.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T07-32-36.212408.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_02T07_32_36.212408", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T07-32-36.212408.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T07-32-36.212408.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_02T07_32_36.212408", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T07-32-36.212408.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T07-32-36.212408.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_02T07_32_36.212408", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T07-32-36.212408.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T07-32-36.212408.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_02T07_32_36.212408", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T07-32-36.212408.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T07-32-36.212408.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_02T07_32_36.212408", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T07-32-36.212408.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T07-32-36.212408.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_02T07_32_36.212408", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T07-32-36.212408.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T07-32-36.212408.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_02T07_32_36.212408", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T07-32-36.212408.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T07-32-36.212408.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_02T07_32_36.212408", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T07-32-36.212408.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T07-32-36.212408.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_02T07_32_36.212408", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T07-32-36.212408.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T07-32-36.212408.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_02T07_32_36.212408", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T07-32-36.212408.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T07-32-36.212408.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_02T07_32_36.212408", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T07-32-36.212408.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T07-32-36.212408.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_02T07_32_36.212408", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T07-32-36.212408.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T07-32-36.212408.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_02T07_32_36.212408", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T07-32-36.212408.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T07-32-36.212408.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_02T07_32_36.212408", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T07-32-36.212408.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T07-32-36.212408.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_02T07_32_36.212408", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T07-32-36.212408.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T07-32-36.212408.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_02T07_32_36.212408", "path": ["**/details_harness|winogrande|5_2024-02-02T07-32-36.212408.parquet"]}, {"split": "latest", "path": 
["**/details_harness|winogrande|5_2024-02-02T07-32-36.212408.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_02_02T07_32_36.212408", "path": ["results_2024-02-02T07-32-36.212408.parquet"]}, {"split": "latest", "path": ["results_2024-02-02T07-32-36.212408.parquet"]}]}]}
2024-02-02T07:35:22+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of NobodyExistsOnTheInternet/code-llama-70b-python-instruct

Dataset automatically created during the evaluation run of model NobodyExistsOnTheInternet/code-llama-70b-python-instruct on the Open LLM Leaderboard.

The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).

To load the details from a run, you can for instance do the following:

## Latest results

These are the latest results from run 2024-02-02T07:32:36.212408 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

## Dataset Details

### Dataset Description

- Curated by: 
- Funded by [optional]: 
- Shared by [optional]: 
- Language(s) (NLP): 
- License:

### Dataset Sources [optional]

- Repository: 
- Paper [optional]: 
- Demo [optional]:

## Uses

### Direct Use

### Out-of-Scope Use

## Dataset Structure

## Dataset Creation

### Curation Rationale

### Source Data

#### Data Collection and Processing

#### Who are the source data producers?

### Annotations [optional]

#### Annotation process

#### Who are the annotators?

#### Personal and Sensitive Information

## Bias, Risks, and Limitations

### Recommendations

Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.

[optional]

BibTeX:

APA:

## Glossary [optional]

## More Information [optional]

## Dataset Card Authors [optional]

## Dataset Card Contact
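The loading snippet itself was stripped from the processed card text above, leaving "To load the details from a run" dangling. A minimal sketch of what that load looks like, assuming the repo id follows the `details_{org}__{model}` naming pattern used by the other evaluation-run datasets in this collection:

```python
from datasets import load_dataset

# The repo id is an assumption here, derived from the details_{org}__{model}
# naming pattern of other Open LLM Leaderboard detail datasets.
data = load_dataset(
    "open-llm-leaderboard/details_NobodyExistsOnTheInternet__code-llama-70b-python-instruct",
    "harness_winogrande_5",  # any config_name from the metadata above should work
    split="train",
)
```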
[ "# Dataset Card for Evaluation run of NobodyExistsOnTheInternet/code-llama-70b-python-instruct\n\n\n\nDataset automatically created during the evaluation run of model NobodyExistsOnTheInternet/code-llama-70b-python-instruct on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T07:32:36.212408(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of NobodyExistsOnTheInternet/code-llama-70b-python-instruct\n\n\n\nDataset automatically created during the evaluation run of model NobodyExistsOnTheInternet/code-llama-70b-python-instruct on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T07:32:36.212408(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
6d7cbf6c64b23eeac0a99167ec7edddbd1d665f4
# FinTwitBERT: Synthetic Financial Tweets Dataset ## Description This dataset contains a collection of synthetically generated tweets related to financial markets, including discussions on stocks and cryptocurrencies. The tweets were generated using the `NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO` model, employing 10-shot random examples from the `TimKoornstra/financial-tweets-sentiment` dataset. Each entry in this dataset provides insights into financial discussions and is labeled with a sentiment value, aimed at supporting sentiment analysis in the financial domain. ## Dataset Structure Each record in the dataset is structured as follows: - **Tweet**: The text of the tweet, offering insights into discussions surrounding financial markets. - **Sentiment**: A numerical label indicating the sentiment of the tweet, with '1' for bullish, '2' for bearish, and '0' for neutral sentiments. ## Dataset Size The dataset comprises 831,530 tweets in total, categorized into: - 309,246 bearish tweets - 272,666 bullish tweets - 249,618 neutral tweets ## Preprocessing The dataset has undergone thorough preprocessing to ensure the quality and consistency of the data. This includes sentiment mapping to the numerical labels and the removal of duplicate entries. ## Disclaimer Given the synthetic nature of this dataset, users should be aware that it may contain shocking, incorrect, or bizarre content, including tweets that do not accurately match their sentiment labels. This dataset is generated through a machine learning model and as such, reflects the model's limitations in understanding complex human sentiments and contexts. Users are advised to apply caution and perform additional validations when using this dataset for research or applications. ## Usage This dataset is particularly suited for training and evaluating machine learning models focused on sentiment analysis within the financial sector. It can serve as a valuable resource for: - Academic research in financial sentiment analysis - Financial market trend analysis - Development of AI tools for financial institutions The structured sentiment labels make it ideal for supervised learning approaches aiming to understand market trends and investor sentiment. ## License This dataset is made available under the MIT License. ## Citation If you use this dataset in your research or applications, please cite it as follows: ``` @misc{FinTwitBERT, author = {Stephan Akkerman, Tim Koornstra}, title = {FinTwitBERT: A Specialized Language Model for Financial Tweets}, year = {2023}, publisher = {GitHub}, journal = {GitHub repository}, howpublished = {\url{https://github.com/TimKoornstra/FinTwitBERT}} } ``` Additionally, if you utilize the sentiment classifier trained on this dataset, please cite: ``` @misc{FinTwitBERT-sentiment, author = {Stephan Akkerman, Tim Koornstra}, title = {FinTwitBERT-sentiment: A Sentiment Classifier for Financial Tweets}, year = {2023}, publisher = {Hugging Face}, howpublished = {\url{https://huggingface.co/StephanAkkerman/FinTwitBERT-sentiment}} } ``` ## Contributions We welcome contributions to the dataset, including suggestions for improvements, reporting issues, and additional data. Please feel free to reach out or submit pull requests to the dataset repository.
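As a small supplement to the Usage section above, a minimal loading sketch for this dataset; the `tweet` and `sentiment` column names and the label mapping are taken from the dataset's feature schema in the metadata below:

```python
from datasets import load_dataset

# Load the synthetic financial tweets from the Hugging Face Hub
dataset = load_dataset("TimKoornstra/synthetic-financial-tweets-sentiment", split="train")

# Map the numerical labels back to their sentiment names
# (0 = neutral, 1 = bullish, 2 = bearish, per the dataset card)
label_names = {0: "neutral", 1: "bullish", 2: "bearish"}

# Inspect a few examples
for example in dataset.select(range(3)):
    print(f"{label_names[example['sentiment']]}: {example['tweet']}")
```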
TimKoornstra/synthetic-financial-tweets-sentiment
[ "task_categories:text-classification", "size_categories:100K<n<1M", "language:en", "license:mit", "synthetic", "tweet", "tweets", "sentiment", "classification", "mixtral", "FinTwitBERT", "FinTwitBERT-sentiment", "region:us" ]
2024-02-02T07:36:36+00:00
{"language": ["en"], "license": "mit", "size_categories": ["100K<n<1M"], "task_categories": ["text-classification"], "pretty_name": "Synthetic Financial Tweets with Sentiment", "dataset_info": {"features": [{"name": "tweet", "dtype": "string"}, {"name": "sentiment", "dtype": {"class_label": {"names": {"0": "neutral", "1": "bullish", "2": "bearish"}}}}], "splits": [{"name": "train", "num_bytes": 85168816, "num_examples": 831530}], "download_size": 47315785, "dataset_size": 85168816}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}], "tags": ["synthetic", "tweet", "tweets", "sentiment", "classification", "mixtral", "FinTwitBERT", "FinTwitBERT-sentiment"]}
2024-02-16T16:37:39+00:00
[]
[ "en" ]
TAGS #task_categories-text-classification #size_categories-100K<n<1M #language-English #license-mit #synthetic #tweet #tweets #sentiment #classification #mixtral #FinTwitBERT #FinTwitBERT-sentiment #region-us
# FinTwitBERT: Synthetic Financial Tweets Dataset ## Description This dataset contains a collection of synthetically generated tweets related to financial markets, including discussions on stocks and cryptocurrencies. The tweets were generated using the 'NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO' model, employing 10-shot random examples from the 'TimKoornstra/financial-tweets-sentiment' dataset. Each entry in this dataset provides insights into financial discussions and is labeled with a sentiment value, aimed at supporting sentiment analysis in the financial domain. ## Dataset Structure Each record in the dataset is structured as follows: - Tweet: The text of the tweet, offering insights into discussions surrounding financial markets. - Sentiment: A numerical label indicating the sentiment of the tweet, with '1' for bullish, '2' for bearish, and '0' for neutral sentiments. ## Dataset Size The dataset comprises 831,530 tweets in total, categorized into: - 309,246 bearish tweets - 272,666 bullish tweets - 249,618 neutral tweets ## Preprocessing The dataset has undergone thorough preprocessing to ensure the quality and consistency of the data. This includes sentiment mapping to the numerical labels and the removal of duplicate entries. ## Disclaimer Given the synthetic nature of this dataset, users should be aware that it may contain shocking, incorrect, or bizarre content, including tweets that do not accurately match their sentiment labels. This dataset is generated through a machine learning model and as such, reflects the model's limitations in understanding complex human sentiments and contexts. Users are advised to apply caution and perform additional validations when using this dataset for research or applications. ## Usage This dataset is particularly suited for training and evaluating machine learning models focused on sentiment analysis within the financial sector. It can serve as a valuable resource for: - Academic research in financial sentiment analysis - Financial market trend analysis - Development of AI tools for financial institutions The structured sentiment labels make it ideal for supervised learning approaches aiming to understand market trends and investor sentiment. ## License This dataset is made available under the MIT License. If you use this dataset in your research or applications, please cite it as follows: Additionally, if you utilize the sentiment classifier trained on this dataset, please cite: ## Contributions We welcome contributions to the dataset, including suggestions for improvements, reporting issues, and additional data. Please feel free to reach out or submit pull requests to the dataset repository.
[ "# FinTwitBERT: Synthetic Financial Tweets Dataset", "## Description\nThis dataset contains a collection of synthetically generated tweets related to financial markets, including discussions on stocks and cryptocurrencies. The tweets were generated using the 'NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO' model, employing 10-shot random examples from the 'TimKoornstra/financial-tweets-sentiment' dataset. Each entry in this dataset provides insights into financial discussions and is labeled with a sentiment value, aimed at supporting sentiment analysis in the financial domain.", "## Dataset Structure\nEach record in the dataset is structured as follows:\n- Tweet: The text of the tweet, offering insights into discussions surrounding financial markets.\n- Sentiment: A numerical label indicating the sentiment of the tweet, with '1' for bullish, '2' for bearish, and '0' for neutral sentiments.", "## Dataset Size\nThe dataset comprises 831,530 tweets in total, categorized into:\n- 309,246 bearish tweets\n- 272,666 bullish tweets\n- 249,618 neutral tweets", "## Preprocessing\nThe dataset has undergone thorough preprocessing to ensure the quality and consistency of the data. This includes sentiment mapping to the numerical labels and the removal of duplicate entries.", "## Disclaimer\nGiven the synthetic nature of this dataset, users should be aware that it may contain shocking, incorrect, or bizarre content, including tweets that do not accurately match their sentiment labels. This dataset is generated through a machine learning model and as such, reflects the model's limitations in understanding complex human sentiments and contexts. Users are advised to apply caution and perform additional validations when using this dataset for research or applications.", "## Usage\nThis dataset is particularly suited for training and evaluating machine learning models focused on sentiment analysis within the financial sector. It can serve as a valuable resource for:\n- Academic research in financial sentiment analysis\n- Financial market trend analysis\n- Development of AI tools for financial institutions\n\nThe structured sentiment labels make it ideal for supervised learning approaches aiming to understand market trends and investor sentiment.", "## License\nThis dataset is made available under the MIT License.\n\nIf you use this dataset in your research or applications, please cite it as follows:\n\n\n\nAdditionally, if you utilize the sentiment classifier trained on this dataset, please cite:", "## Contributions\nWe welcome contributions to the dataset, including suggestions for improvements, reporting issues, and additional data. Please feel free to reach out or submit pull requests to the dataset repository." ]
[ "TAGS\n#task_categories-text-classification #size_categories-100K<n<1M #language-English #license-mit #synthetic #tweet #tweets #sentiment #classification #mixtral #FinTwitBERT #FinTwitBERT-sentiment #region-us \n", "# FinTwitBERT: Synthetic Financial Tweets Dataset", "## Description\nThis dataset contains a collection of synthetically generated tweets related to financial markets, including discussions on stocks and cryptocurrencies. The tweets were generated using the 'NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO' model, employing 10-shot random examples from the 'TimKoornstra/financial-tweets-sentiment' dataset. Each entry in this dataset provides insights into financial discussions and is labeled with a sentiment value, aimed at supporting sentiment analysis in the financial domain.", "## Dataset Structure\nEach record in the dataset is structured as follows:\n- Tweet: The text of the tweet, offering insights into discussions surrounding financial markets.\n- Sentiment: A numerical label indicating the sentiment of the tweet, with '1' for bullish, '2' for bearish, and '0' for neutral sentiments.", "## Dataset Size\nThe dataset comprises 831,530 tweets in total, categorized into:\n- 309,246 bearish tweets\n- 272,666 bullish tweets\n- 249,618 neutral tweets", "## Preprocessing\nThe dataset has undergone thorough preprocessing to ensure the quality and consistency of the data. This includes sentiment mapping to the numerical labels and the removal of duplicate entries.", "## Disclaimer\nGiven the synthetic nature of this dataset, users should be aware that it may contain shocking, incorrect, or bizarre content, including tweets that do not accurately match their sentiment labels. This dataset is generated through a machine learning model and as such, reflects the model's limitations in understanding complex human sentiments and contexts. Users are advised to apply caution and perform additional validations when using this dataset for research or applications.", "## Usage\nThis dataset is particularly suited for training and evaluating machine learning models focused on sentiment analysis within the financial sector. It can serve as a valuable resource for:\n- Academic research in financial sentiment analysis\n- Financial market trend analysis\n- Development of AI tools for financial institutions\n\nThe structured sentiment labels make it ideal for supervised learning approaches aiming to understand market trends and investor sentiment.", "## License\nThis dataset is made available under the MIT License.\n\nIf you use this dataset in your research or applications, please cite it as follows:\n\n\n\nAdditionally, if you utilize the sentiment classifier trained on this dataset, please cite:", "## Contributions\nWe welcome contributions to the dataset, including suggestions for improvements, reporting issues, and additional data. Please feel free to reach out or submit pull requests to the dataset repository." ]
b396f0bf4e69d0c1ebec046f00f85e1a8882d313
# Dataset of Isla (Plastic Memories)

This is the dataset of Isla (Plastic Memories), containing 590 images and their tags.

The core tags of this character are `long_hair, red_eyes, ahoge, white_hair, twintails, hair_between_eyes`, which are pruned in this dataset.

Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...); the auto-crawling system is powered by [DeepGHS Team](https://github.com/deepghs) ([huggingface organization](https://huggingface.co/deepghs)).

## List of Packages

| Name | Images | Size | Download | Type | Description |
|:-----------------|---------:|:-----------|:----------------------------------------------------------------------------------------------------------------------|:-----------|:---------------------------------------------------------------------|
| raw | 590 | 520.54 MiB | [Download](https://huggingface.co/datasets/CyberHarem/isla_plasticmemories/resolve/main/dataset-raw.zip) | Waifuc-Raw | Raw data with meta information (min edge aligned to 1400 if larger). |
| 800 | 590 | 389.96 MiB | [Download](https://huggingface.co/datasets/CyberHarem/isla_plasticmemories/resolve/main/dataset-800.zip) | IMG+TXT | dataset with the shorter side not exceeding 800 pixels. |
| stage3-p480-800 | 1134 | 704.12 MiB | [Download](https://huggingface.co/datasets/CyberHarem/isla_plasticmemories/resolve/main/dataset-stage3-p480-800.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. |
| 1200 | 590 | 520.35 MiB | [Download](https://huggingface.co/datasets/CyberHarem/isla_plasticmemories/resolve/main/dataset-1200.zip) | IMG+TXT | dataset with the shorter side not exceeding 1200 pixels. |
| stage3-p480-1200 | 1134 | 912.41 MiB | [Download](https://huggingface.co/datasets/CyberHarem/isla_plasticmemories/resolve/main/dataset-stage3-p480-1200.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. |

### Load Raw Dataset with Waifuc

We provide the raw dataset (including tagged images) for [waifuc](https://deepghs.github.io/waifuc/main/tutorials/installation/index.html) loading. If you need it, just run the following code:

```python
import os
import zipfile

from huggingface_hub import hf_hub_download
from waifuc.source import LocalSource

# download raw archive file
zip_file = hf_hub_download(
    repo_id='CyberHarem/isla_plasticmemories',
    repo_type='dataset',
    filename='dataset-raw.zip',
)

# extract files to your directory
dataset_dir = 'dataset_dir'
os.makedirs(dataset_dir, exist_ok=True)
with zipfile.ZipFile(zip_file, 'r') as zf:
    zf.extractall(dataset_dir)

# load the dataset with waifuc
source = LocalSource(dataset_dir)
for item in source:
    print(item.image, item.meta['filename'], item.meta['tags'])
```

## List of Clusters

List of tag clustering results; some outfits may be mined here.
### Raw Text Version | # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | Tags | |----:|----------:|:----------------------------------|:----------------------------------|:----------------------------------|:----------------------------------|:----------------------------------|:-------------------------------------------------------------------------------------------------------------------| | 0 | 11 | ![](samples/0/clu0-sample0.png) | ![](samples/0/clu0-sample1.png) | ![](samples/0/clu0-sample2.png) | ![](samples/0/clu0-sample3.png) | ![](samples/0/clu0-sample4.png) | 1girl, anime_coloring, solo, blush, ponytail, portrait, parody | | 1 | 14 | ![](samples/1/clu1-sample0.png) | ![](samples/1/clu1-sample1.png) | ![](samples/1/clu1-sample2.png) | ![](samples/1/clu1-sample3.png) | ![](samples/1/clu1-sample4.png) | 1girl, anime_coloring, portrait, solo, blush, bangs, close-up, parody, blurry_background, closed_mouth, open_mouth | | 2 | 6 | ![](samples/2/clu2-sample0.png) | ![](samples/2/clu2-sample1.png) | ![](samples/2/clu2-sample2.png) | ![](samples/2/clu2-sample3.png) | ![](samples/2/clu2-sample4.png) | 1girl, bangs, blush, closed_mouth, portrait, solo, smile, anime_coloring, looking_at_viewer | | 3 | 5 | ![](samples/3/clu3-sample0.png) | ![](samples/3/clu3-sample1.png) | ![](samples/3/clu3-sample2.png) | ![](samples/3/clu3-sample3.png) | ![](samples/3/clu3-sample4.png) | 1girl, solo | | 4 | 17 | ![](samples/4/clu4-sample0.png) | ![](samples/4/clu4-sample1.png) | ![](samples/4/clu4-sample2.png) | ![](samples/4/clu4-sample3.png) | ![](samples/4/clu4-sample4.png) | 1girl, red_necktie, solo, looking_at_viewer, parody, uniform, grey_hair, portrait, blush | | 5 | 9 | ![](samples/5/clu5-sample0.png) | ![](samples/5/clu5-sample1.png) | ![](samples/5/clu5-sample2.png) | ![](samples/5/clu5-sample3.png) | ![](samples/5/clu5-sample4.png) | 1girl, detached_sleeves, necktie, solo, uniform, parody, upper_body | | 6 | 5 | ![](samples/6/clu6-sample0.png) | ![](samples/6/clu6-sample1.png) | ![](samples/6/clu6-sample2.png) | ![](samples/6/clu6-sample3.png) | ![](samples/6/clu6-sample4.png) | 1girl, detached_sleeves, employee_uniform, solo, blush, necktie, parody, looking_at_viewer | | 7 | 6 | ![](samples/7/clu7-sample0.png) | ![](samples/7/clu7-sample1.png) | ![](samples/7/clu7-sample2.png) | ![](samples/7/clu7-sample3.png) | ![](samples/7/clu7-sample4.png) | 1girl, detached_sleeves, employee_uniform, grey_hair, ponytail, solo, id_card | | 8 | 5 | ![](samples/8/clu8-sample0.png) | ![](samples/8/clu8-sample1.png) | ![](samples/8/clu8-sample2.png) | ![](samples/8/clu8-sample3.png) | ![](samples/8/clu8-sample4.png) | 1girl, sitting, solo, barefoot, grey_hair, hugging_own_legs, indoors, ponytail, sleeveless, parody, profile | | 9 | 8 | ![](samples/9/clu9-sample0.png) | ![](samples/9/clu9-sample1.png) | ![](samples/9/clu9-sample2.png) | ![](samples/9/clu9-sample3.png) | ![](samples/9/clu9-sample4.png) | 2girls, school_uniform, collarbone, blush, orange_hair, anime_coloring, sleeveless | | 10 | 5 | ![](samples/10/clu10-sample0.png) | ![](samples/10/clu10-sample1.png) | ![](samples/10/clu10-sample2.png) | ![](samples/10/clu10-sample3.png) | ![](samples/10/clu10-sample4.png) | 2girls, blurry, closed_mouth, orange_hair, pajamas, bangs, blush, shirt, solo_focus | ### Table Version | # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | 1girl | anime_coloring | solo | blush | ponytail | portrait | parody | bangs | close-up | blurry_background | closed_mouth | open_mouth | smile | looking_at_viewer | red_necktie | 
uniform | grey_hair | detached_sleeves | necktie | upper_body | employee_uniform | id_card | sitting | barefoot | hugging_own_legs | indoors | sleeveless | profile | 2girls | school_uniform | collarbone | orange_hair | blurry | pajamas | shirt | solo_focus | |----:|----------:|:----------------------------------|:----------------------------------|:----------------------------------|:----------------------------------|:----------------------------------|:--------|:-----------------|:-------|:--------|:-----------|:-----------|:---------|:--------|:-----------|:--------------------|:---------------|:-------------|:--------|:--------------------|:--------------|:----------|:------------|:-------------------|:----------|:-------------|:-------------------|:----------|:----------|:-----------|:-------------------|:----------|:-------------|:----------|:---------|:-----------------|:-------------|:--------------|:---------|:----------|:--------|:-------------| | 0 | 11 | ![](samples/0/clu0-sample0.png) | ![](samples/0/clu0-sample1.png) | ![](samples/0/clu0-sample2.png) | ![](samples/0/clu0-sample3.png) | ![](samples/0/clu0-sample4.png) | X | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | 1 | 14 | ![](samples/1/clu1-sample0.png) | ![](samples/1/clu1-sample1.png) | ![](samples/1/clu1-sample2.png) | ![](samples/1/clu1-sample3.png) | ![](samples/1/clu1-sample4.png) | X | X | X | X | | X | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | 2 | 6 | ![](samples/2/clu2-sample0.png) | ![](samples/2/clu2-sample1.png) | ![](samples/2/clu2-sample2.png) | ![](samples/2/clu2-sample3.png) | ![](samples/2/clu2-sample4.png) | X | X | X | X | | X | | X | | | X | | X | X | | | | | | | | | | | | | | | | | | | | | | | | 3 | 5 | ![](samples/3/clu3-sample0.png) | ![](samples/3/clu3-sample1.png) | ![](samples/3/clu3-sample2.png) | ![](samples/3/clu3-sample3.png) | ![](samples/3/clu3-sample4.png) | X | | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | 4 | 17 | ![](samples/4/clu4-sample0.png) | ![](samples/4/clu4-sample1.png) | ![](samples/4/clu4-sample2.png) | ![](samples/4/clu4-sample3.png) | ![](samples/4/clu4-sample4.png) | X | | X | X | | X | X | | | | | | | X | X | X | X | | | | | | | | | | | | | | | | | | | | | 5 | 9 | ![](samples/5/clu5-sample0.png) | ![](samples/5/clu5-sample1.png) | ![](samples/5/clu5-sample2.png) | ![](samples/5/clu5-sample3.png) | ![](samples/5/clu5-sample4.png) | X | | X | | | | X | | | | | | | | | X | | X | X | X | | | | | | | | | | | | | | | | | | 6 | 5 | ![](samples/6/clu6-sample0.png) | ![](samples/6/clu6-sample1.png) | ![](samples/6/clu6-sample2.png) | ![](samples/6/clu6-sample3.png) | ![](samples/6/clu6-sample4.png) | X | | X | X | | | X | | | | | | | X | | | | X | X | | X | | | | | | | | | | | | | | | | | 7 | 6 | ![](samples/7/clu7-sample0.png) | ![](samples/7/clu7-sample1.png) | ![](samples/7/clu7-sample2.png) | ![](samples/7/clu7-sample3.png) | ![](samples/7/clu7-sample4.png) | X | | X | | X | | | | | | | | | | | | X | X | | | X | X | | | | | | | | | | | | | | | | 8 | 5 | ![](samples/8/clu8-sample0.png) | ![](samples/8/clu8-sample1.png) | ![](samples/8/clu8-sample2.png) | ![](samples/8/clu8-sample3.png) | ![](samples/8/clu8-sample4.png) | X | | X | | X | | X | | | | | | | | | | X | | | | | | X | X | X | X | X | X | | | | | | | | | | 9 | 8 | ![](samples/9/clu9-sample0.png) | ![](samples/9/clu9-sample1.png) | ![](samples/9/clu9-sample2.png) | ![](samples/9/clu9-sample3.png) | 
![](samples/9/clu9-sample4.png) | | X | | X | | | | | | | | | | | | | | | | | | | | | | | X | | X | X | X | X | | | | | | 10 | 5 | ![](samples/10/clu10-sample0.png) | ![](samples/10/clu10-sample1.png) | ![](samples/10/clu10-sample2.png) | ![](samples/10/clu10-sample3.png) | ![](samples/10/clu10-sample4.png) | | | | X | | | | X | | | X | | | | | | | | | | | | | | | | | | X | | | X | X | X | X | X |
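As a supplement to the List of Packages table above: the pre-processed archives can be fetched the same way as the raw one. A minimal sketch reusing the card's own download pattern, with the `dataset-800.zip` filename taken from the package table:

```python
import os
import zipfile

from huggingface_hub import hf_hub_download

# fetch the 800px IMG+TXT package listed in the table above
zip_file = hf_hub_download(
    repo_id='CyberHarem/isla_plasticmemories',
    repo_type='dataset',
    filename='dataset-800.zip',
)

# extract the image/caption-text pairs for direct use in training pipelines
out_dir = 'isla_800'
os.makedirs(out_dir, exist_ok=True)
with zipfile.ZipFile(zip_file, 'r') as zf:
    zf.extractall(out_dir)
```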
CyberHarem/isla_plasticmemories
[ "task_categories:text-to-image", "size_categories:n<1K", "license:mit", "art", "not-for-all-audiences", "region:us" ]
2024-02-02T07:42:51+00:00
{"license": "mit", "size_categories": ["n<1K"], "task_categories": ["text-to-image"], "tags": ["art", "not-for-all-audiences"]}
2024-02-02T08:49:11+00:00
[]
[]
TAGS #task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us
Dataset of Isla (Plastic Memories)
==================================

This is the dataset of Isla (Plastic Memories), containing 590 images and their tags.

The core tags of this character are 'long\_hair, red\_eyes, ahoge, white\_hair, twintails, hair\_between\_eyes', which are pruned in this dataset.

Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...); the auto-crawling system is powered by DeepGHS Team (huggingface organization).

List of Packages
----------------

### Load Raw Dataset with Waifuc

We provide the raw dataset (including tagged images) for waifuc loading. If you need it, just run the following code

List of Clusters
----------------

List of tag clustering results; some outfits may be mined here.

### Raw Text Version

### Table Version
[ "### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.", "### Raw Text Version", "### Table Version" ]
[ "TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n", "### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.", "### Raw Text Version", "### Table Version" ]
99e3f0dccaba0ee003d7ede5772223ec60973bf9
# Dataset of Kazuki Kuwanomi (Plastic Memories)

This is the dataset of Kazuki Kuwanomi (Plastic Memories), containing 139 images and their tags.

The core tags of this character are `red_hair, purple_eyes, folded_ponytail`, which are pruned in this dataset.

Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...); the auto-crawling system is powered by [DeepGHS Team](https://github.com/deepghs) ([huggingface organization](https://huggingface.co/deepghs)).

## List of Packages

| Name | Images | Size | Download | Type | Description |
|:-----------------|---------:|:-----------|:---------------------------------------------------------------------------------------------------------------------------------|:-----------|:---------------------------------------------------------------------|
| raw | 139 | 103.56 MiB | [Download](https://huggingface.co/datasets/CyberHarem/kazuki_kuwanomi_plasticmemories/resolve/main/dataset-raw.zip) | Waifuc-Raw | Raw data with meta information (min edge aligned to 1400 if larger). |
| 800 | 139 | 82.77 MiB | [Download](https://huggingface.co/datasets/CyberHarem/kazuki_kuwanomi_plasticmemories/resolve/main/dataset-800.zip) | IMG+TXT | dataset with the shorter side not exceeding 800 pixels. |
| stage3-p480-800 | 283 | 154.56 MiB | [Download](https://huggingface.co/datasets/CyberHarem/kazuki_kuwanomi_plasticmemories/resolve/main/dataset-stage3-p480-800.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. |
| 1200 | 139 | 103.52 MiB | [Download](https://huggingface.co/datasets/CyberHarem/kazuki_kuwanomi_plasticmemories/resolve/main/dataset-1200.zip) | IMG+TXT | dataset with the shorter side not exceeding 1200 pixels. |
| stage3-p480-1200 | 283 | 185.52 MiB | [Download](https://huggingface.co/datasets/CyberHarem/kazuki_kuwanomi_plasticmemories/resolve/main/dataset-stage3-p480-1200.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. |

### Load Raw Dataset with Waifuc

We provide the raw dataset (including tagged images) for [waifuc](https://deepghs.github.io/waifuc/main/tutorials/installation/index.html) loading. If you need it, just run the following code:

```python
import os
import zipfile

from huggingface_hub import hf_hub_download
from waifuc.source import LocalSource

# download raw archive file
zip_file = hf_hub_download(
    repo_id='CyberHarem/kazuki_kuwanomi_plasticmemories',
    repo_type='dataset',
    filename='dataset-raw.zip',
)

# extract files to your directory
dataset_dir = 'dataset_dir'
os.makedirs(dataset_dir, exist_ok=True)
with zipfile.ZipFile(zip_file, 'r') as zf:
    zf.extractall(dataset_dir)

# load the dataset with waifuc
source = LocalSource(dataset_dir)
for item in source:
    print(item.image, item.meta['filename'], item.meta['tags'])
```

## List of Clusters

List of tag clustering results; some outfits may be mined here.
### Raw Text Version | # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | Tags | |----:|----------:|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:------------------------------------------------------------------------------------------------------------------------------------| | 0 | 7 | ![](samples/0/clu0-sample0.png) | ![](samples/0/clu0-sample1.png) | ![](samples/0/clu0-sample2.png) | ![](samples/0/clu0-sample3.png) | ![](samples/0/clu0-sample4.png) | 1girl, blush, wine_glass, food, solo_focus | | 1 | 10 | ![](samples/1/clu1-sample0.png) | ![](samples/1/clu1-sample1.png) | ![](samples/1/clu1-sample2.png) | ![](samples/1/clu1-sample3.png) | ![](samples/1/clu1-sample4.png) | 1girl, solo, hair_between_eyes, portrait, closed_mouth, anime_coloring, looking_at_viewer, bangs | | 2 | 5 | ![](samples/2/clu2-sample0.png) | ![](samples/2/clu2-sample1.png) | ![](samples/2/clu2-sample2.png) | ![](samples/2/clu2-sample3.png) | ![](samples/2/clu2-sample4.png) | 1girl, anime_coloring, looking_at_viewer, parody, solo, open_mouth, bangs | | 3 | 5 | ![](samples/3/clu3-sample0.png) | ![](samples/3/clu3-sample1.png) | ![](samples/3/clu3-sample2.png) | ![](samples/3/clu3-sample3.png) | ![](samples/3/clu3-sample4.png) | 1girl, closed_mouth, looking_at_viewer, solo, collarbone, hair_between_eyes, upper_body, short_hair, sleeveless_shirt, yellow_shirt | | 4 | 17 | ![](samples/4/clu4-sample0.png) | ![](samples/4/clu4-sample1.png) | ![](samples/4/clu4-sample2.png) | ![](samples/4/clu4-sample3.png) | ![](samples/4/clu4-sample4.png) | 1girl, solo, detached_sleeves, sitting, short_hair | | 5 | 5 | ![](samples/5/clu5-sample0.png) | ![](samples/5/clu5-sample1.png) | ![](samples/5/clu5-sample2.png) | ![](samples/5/clu5-sample3.png) | ![](samples/5/clu5-sample4.png) | 1girl, crossed_arms, solo, necktie, school_uniform, sleeveless, breasts, looking_at_viewer, upper_body | ### Table Version | # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | 1girl | blush | wine_glass | food | solo_focus | solo | hair_between_eyes | portrait | closed_mouth | anime_coloring | looking_at_viewer | bangs | parody | open_mouth | collarbone | upper_body | short_hair | sleeveless_shirt | yellow_shirt | detached_sleeves | sitting | crossed_arms | necktie | school_uniform | sleeveless | breasts | |----:|----------:|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------|:--------|:-------------|:-------|:-------------|:-------|:--------------------|:-----------|:---------------|:-----------------|:--------------------|:--------|:---------|:-------------|:-------------|:-------------|:-------------|:-------------------|:---------------|:-------------------|:----------|:---------------|:----------|:-----------------|:-------------|:----------| | 0 | 7 | ![](samples/0/clu0-sample0.png) | ![](samples/0/clu0-sample1.png) | ![](samples/0/clu0-sample2.png) | ![](samples/0/clu0-sample3.png) | ![](samples/0/clu0-sample4.png) | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | 1 | 10 | ![](samples/1/clu1-sample0.png) | ![](samples/1/clu1-sample1.png) | ![](samples/1/clu1-sample2.png) | ![](samples/1/clu1-sample3.png) | ![](samples/1/clu1-sample4.png) | X | | | | | X | X | X | X | X | X | X | | | | | | | | | | | | | | | | 2 | 5 | ![](samples/2/clu2-sample0.png) | 
![](samples/2/clu2-sample1.png) | ![](samples/2/clu2-sample2.png) | ![](samples/2/clu2-sample3.png) | ![](samples/2/clu2-sample4.png) | X | | | | | X | | | | X | X | X | X | X | | | | | | | | | | | | | | 3 | 5 | ![](samples/3/clu3-sample0.png) | ![](samples/3/clu3-sample1.png) | ![](samples/3/clu3-sample2.png) | ![](samples/3/clu3-sample3.png) | ![](samples/3/clu3-sample4.png) | X | | | | | X | X | | X | | X | | | | X | X | X | X | X | | | | | | | | | 4 | 17 | ![](samples/4/clu4-sample0.png) | ![](samples/4/clu4-sample1.png) | ![](samples/4/clu4-sample2.png) | ![](samples/4/clu4-sample3.png) | ![](samples/4/clu4-sample4.png) | X | | | | | X | | | | | | | | | | | X | | | X | X | | | | | | | 5 | 5 | ![](samples/5/clu5-sample0.png) | ![](samples/5/clu5-sample1.png) | ![](samples/5/clu5-sample2.png) | ![](samples/5/clu5-sample3.png) | ![](samples/5/clu5-sample4.png) | X | | | | | X | | | | | X | | | | | X | | | | | | X | X | X | X | X |
CyberHarem/kazuki_kuwanomi_plasticmemories
[ "task_categories:text-to-image", "size_categories:n<1K", "license:mit", "art", "not-for-all-audiences", "region:us" ]
2024-02-02T07:43:36+00:00
{"license": "mit", "size_categories": ["n<1K"], "task_categories": ["text-to-image"], "tags": ["art", "not-for-all-audiences"]}
2024-02-02T07:58:00+00:00
[]
[]
TAGS #task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us
Dataset of Kazuki Kuwanomi (Plastic Memories)
=============================================

This is the dataset of Kazuki Kuwanomi (Plastic Memories), containing 139 images and their tags.

The core tags of this character are 'red\_hair, purple\_eyes, folded\_ponytail', which are pruned in this dataset.

Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...); the auto-crawling system is powered by DeepGHS Team (huggingface organization).

List of Packages
----------------

### Load Raw Dataset with Waifuc

We provide the raw dataset (including tagged images) for waifuc loading. If you need it, just run the following code

List of Clusters
----------------

List of tag clustering results; some outfits may be mined here.

### Raw Text Version

### Table Version
[ "### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.", "### Raw Text Version", "### Table Version" ]
[ "TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n", "### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.", "### Raw Text Version", "### Table Version" ]
88ab82f6d1fa5b37677d079d8efbec205eb67737
# Dataset of Michiru Kinushima (Plastic Memories)

This is the dataset of Michiru Kinushima (Plastic Memories), containing 168 images and their tags.

The core tags of this character are `orange_hair, long_hair, blue_eyes, bangs`, which are pruned in this dataset.

Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...); the auto-crawling system is powered by [DeepGHS Team](https://github.com/deepghs) ([huggingface organization](https://huggingface.co/deepghs)).

## List of Packages

| Name | Images | Size | Download | Type | Description |
|:-----------------|---------:|:-----------|:-----------------------------------------------------------------------------------------------------------------------------------|:-----------|:---------------------------------------------------------------------|
| raw | 168 | 146.48 MiB | [Download](https://huggingface.co/datasets/CyberHarem/michiru_kinushima_plasticmemories/resolve/main/dataset-raw.zip) | Waifuc-Raw | Raw data with meta information (min edge aligned to 1400 if larger). |
| 800 | 168 | 113.49 MiB | [Download](https://huggingface.co/datasets/CyberHarem/michiru_kinushima_plasticmemories/resolve/main/dataset-800.zip) | IMG+TXT | dataset with the shorter side not exceeding 800 pixels. |
| stage3-p480-800 | 347 | 220.04 MiB | [Download](https://huggingface.co/datasets/CyberHarem/michiru_kinushima_plasticmemories/resolve/main/dataset-stage3-p480-800.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. |
| 1200 | 168 | 146.42 MiB | [Download](https://huggingface.co/datasets/CyberHarem/michiru_kinushima_plasticmemories/resolve/main/dataset-1200.zip) | IMG+TXT | dataset with the shorter side not exceeding 1200 pixels. |
| stage3-p480-1200 | 347 | 271.38 MiB | [Download](https://huggingface.co/datasets/CyberHarem/michiru_kinushima_plasticmemories/resolve/main/dataset-stage3-p480-1200.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. |

### Load Raw Dataset with Waifuc

We provide the raw dataset (including tagged images) for [waifuc](https://deepghs.github.io/waifuc/main/tutorials/installation/index.html) loading. If you need it, just run the following code:

```python
import os
import zipfile

from huggingface_hub import hf_hub_download
from waifuc.source import LocalSource

# download raw archive file
zip_file = hf_hub_download(
    repo_id='CyberHarem/michiru_kinushima_plasticmemories',
    repo_type='dataset',
    filename='dataset-raw.zip',
)

# extract files to your directory
dataset_dir = 'dataset_dir'
os.makedirs(dataset_dir, exist_ok=True)
with zipfile.ZipFile(zip_file, 'r') as zf:
    zf.extractall(dataset_dir)

# load the dataset with waifuc
source = LocalSource(dataset_dir)
for item in source:
    print(item.image, item.meta['filename'], item.meta['tags'])
```

## List of Clusters

List of tag clustering results; some outfits may be mined here.
### Raw Text Version | # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | Tags | |----:|----------:|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:------------------------------------------------------------------------------------------------------------------------------------| | 0 | 37 | ![](samples/0/clu0-sample0.png) | ![](samples/0/clu0-sample1.png) | ![](samples/0/clu0-sample2.png) | ![](samples/0/clu0-sample3.png) | ![](samples/0/clu0-sample4.png) | 1girl, solo, school_uniform, upper_body, sleeveless_shirt, blush, pink_sailor_collar, looking_at_viewer, closed_mouth, pink_necktie | | 1 | 6 | ![](samples/1/clu1-sample0.png) | ![](samples/1/clu1-sample1.png) | ![](samples/1/clu1-sample2.png) | ![](samples/1/clu1-sample3.png) | ![](samples/1/clu1-sample4.png) | 1girl, hair_ornament, solo, window, detached_sleeves | | 2 | 10 | ![](samples/2/clu2-sample0.png) | ![](samples/2/clu2-sample1.png) | ![](samples/2/clu2-sample2.png) | ![](samples/2/clu2-sample3.png) | ![](samples/2/clu2-sample4.png) | 1girl, detached_sleeves, solo, looking_at_viewer, crossed_arms | | 3 | 6 | ![](samples/3/clu3-sample0.png) | ![](samples/3/clu3-sample1.png) | ![](samples/3/clu3-sample2.png) | ![](samples/3/clu3-sample3.png) | ![](samples/3/clu3-sample4.png) | 1girl, glasses, under-rim_eyewear, red-framed_eyewear, solo, parody, tears | ### Table Version | # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | 1girl | solo | school_uniform | upper_body | sleeveless_shirt | blush | pink_sailor_collar | looking_at_viewer | closed_mouth | pink_necktie | hair_ornament | window | detached_sleeves | crossed_arms | glasses | under-rim_eyewear | red-framed_eyewear | parody | tears | |----:|----------:|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------|:-------|:-----------------|:-------------|:-------------------|:--------|:---------------------|:--------------------|:---------------|:---------------|:----------------|:---------|:-------------------|:---------------|:----------|:--------------------|:---------------------|:---------|:--------| | 0 | 37 | ![](samples/0/clu0-sample0.png) | ![](samples/0/clu0-sample1.png) | ![](samples/0/clu0-sample2.png) | ![](samples/0/clu0-sample3.png) | ![](samples/0/clu0-sample4.png) | X | X | X | X | X | X | X | X | X | X | | | | | | | | | | | 1 | 6 | ![](samples/1/clu1-sample0.png) | ![](samples/1/clu1-sample1.png) | ![](samples/1/clu1-sample2.png) | ![](samples/1/clu1-sample3.png) | ![](samples/1/clu1-sample4.png) | X | X | | | | | | | | | X | X | X | | | | | | | | 2 | 10 | ![](samples/2/clu2-sample0.png) | ![](samples/2/clu2-sample1.png) | ![](samples/2/clu2-sample2.png) | ![](samples/2/clu2-sample3.png) | ![](samples/2/clu2-sample4.png) | X | X | | | | | | X | | | | | X | X | | | | | | | 3 | 6 | ![](samples/3/clu3-sample0.png) | ![](samples/3/clu3-sample1.png) | ![](samples/3/clu3-sample2.png) | ![](samples/3/clu3-sample3.png) | ![](samples/3/clu3-sample4.png) | X | X | | | | | | | | | | | | | X | X | X | X | X |
CyberHarem/michiru_kinushima_plasticmemories
[ "task_categories:text-to-image", "size_categories:n<1K", "license:mit", "art", "not-for-all-audiences", "region:us" ]
2024-02-02T07:45:03+00:00
{"license": "mit", "size_categories": ["n<1K"], "task_categories": ["text-to-image"], "tags": ["art", "not-for-all-audiences"]}
2024-02-02T08:03:11+00:00
[]
[]
TAGS #task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us
Dataset of Michiru Kinushima (Plastic Memories) =============================================== This is the dataset of Michiru Kinushima (Plastic Memories), containing 168 images and their tags. The core tags of this character are 'orange\_hair, long\_hair, blue\_eyes, bangs', which are pruned in this dataset. Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...), the auto-crawling system is powered by DeepGHS Team(huggingface organization). List of Packages ---------------- ### Load Raw Dataset with Waifuc We provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code List of Clusters ---------------- List of tag clustering result, maybe some outfits can be mined here. ### Raw Text Version ### Table Version
[ "### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.", "### Raw Text Version", "### Table Version" ]
[ "TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n", "### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.", "### Raw Text Version", "### Table Version" ]
68f6607e1bfeb5451dd2ceaae2db852421b81e84
# Dataset Card for Evaluation run of hfl/chinese-mixtral

<!-- Provide a quick summary of the dataset. -->

Dataset automatically created during the evaluation run of model [hfl/chinese-mixtral](https://huggingface.co/hfl/chinese-mixtral) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:

```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_hfl__chinese-mixtral",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2024-02-04T20:55:39.377397](https://huggingface.co/datasets/open-llm-leaderboard/details_hfl__chinese-mixtral/blob/main/results_2024-02-04T20-55-39.377397.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval):

```python
{ "all": { "acc": 0.6923714264378582, "acc_stderr": 0.03032741436858707, "acc_norm": 0.7058378526318035, "acc_norm_stderr": 0.031146777637985557, "mc1": 0.30599755201958384, "mc1_stderr": 0.01613222972815505, "mc2": 0.46858539506441044, "mc2_stderr": 0.014457363907207055 }, "harness|arc:challenge|25": { "acc": 0.6459044368600683, "acc_stderr": 0.013975454122756567, "acc_norm": 0.6757679180887372, "acc_norm_stderr": 0.01367881039951882 }, "harness|hellaswag|10": { "acc": 0.6534554869547898, "acc_stderr": 0.004748965717214273, "acc_norm": 0.853415654252141, "acc_norm_stderr": 0.0035296822858572646 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.39, "acc_stderr": 0.049020713000019756, "acc_norm": 0.39, "acc_norm_stderr": 0.049020713000019756 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6592592592592592, "acc_stderr": 0.04094376269996793, "acc_norm": 0.6592592592592592, "acc_norm_stderr": 0.04094376269996793 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.7763157894736842, "acc_stderr": 0.03391160934343603, "acc_norm": 0.7763157894736842, "acc_norm_stderr": 0.03391160934343603 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.69, "acc_stderr": 0.04648231987117316, "acc_norm": 0.69, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7849056603773585, "acc_stderr": 0.02528839450289137, "acc_norm": 0.7849056603773585, "acc_norm_stderr": 0.02528839450289137 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.8472222222222222, "acc_stderr": 0.030085743248565656, "acc_norm": 0.8472222222222222, "acc_norm_stderr": 0.030085743248565656 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.55, "acc_stderr": 0.049999999999999996, "acc_norm": 0.55, "acc_norm_stderr": 0.049999999999999996 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.58, "acc_stderr": 0.049604496374885836, "acc_norm": 0.58, "acc_norm_stderr": 0.049604496374885836 },
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.43, "acc_stderr": 0.049756985195624284, "acc_norm": 0.43, "acc_norm_stderr": 0.049756985195624284 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.7109826589595376, "acc_stderr": 0.034564257450869995, "acc_norm": 0.7109826589595376, "acc_norm_stderr": 0.034564257450869995 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.5588235294117647, "acc_stderr": 0.04940635630605659, "acc_norm": 0.5588235294117647, "acc_norm_stderr": 0.04940635630605659 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.81, "acc_stderr": 0.039427724440366234, "acc_norm": 0.81, "acc_norm_stderr": 0.039427724440366234 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.6638297872340425, "acc_stderr": 0.030881618520676942, "acc_norm": 0.6638297872340425, "acc_norm_stderr": 0.030881618520676942 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5964912280701754, "acc_stderr": 0.04615186962583707, "acc_norm": 0.5964912280701754, "acc_norm_stderr": 0.04615186962583707 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.6344827586206897, "acc_stderr": 0.04013124195424385, "acc_norm": 0.6344827586206897, "acc_norm_stderr": 0.04013124195424385 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.455026455026455, "acc_stderr": 0.025646928361049395, "acc_norm": 0.455026455026455, "acc_norm_stderr": 0.025646928361049395 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.5, "acc_stderr": 0.04472135954999579, "acc_norm": 0.5, "acc_norm_stderr": 0.04472135954999579 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.47, "acc_stderr": 0.05016135580465919, "acc_norm": 0.47, "acc_norm_stderr": 0.05016135580465919 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.8387096774193549, "acc_stderr": 0.020923327006423294, "acc_norm": 0.8387096774193549, "acc_norm_stderr": 0.020923327006423294 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.6009852216748769, "acc_stderr": 0.034454876862647144, "acc_norm": 0.6009852216748769, "acc_norm_stderr": 0.034454876862647144 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.74, "acc_stderr": 0.04408440022768078, "acc_norm": 0.74, "acc_norm_stderr": 0.04408440022768078 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.806060606060606, "acc_stderr": 0.030874145136562094, "acc_norm": 0.806060606060606, "acc_norm_stderr": 0.030874145136562094 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8686868686868687, "acc_stderr": 0.024063156416822516, "acc_norm": 0.8686868686868687, "acc_norm_stderr": 0.024063156416822516 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9326424870466321, "acc_stderr": 0.018088393839078912, "acc_norm": 0.9326424870466321, "acc_norm_stderr": 0.018088393839078912 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6974358974358974, "acc_stderr": 0.02329088805377273, "acc_norm": 0.6974358974358974, "acc_norm_stderr": 0.02329088805377273 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3333333333333333, "acc_stderr": 0.028742040903948485, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.028742040903948485 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.7857142857142857, "acc_stderr": 0.02665353159671549, "acc_norm": 0.7857142857142857, "acc_norm_stderr": 0.02665353159671549 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.5033112582781457, "acc_stderr": 0.04082393379449654, "acc_norm": 
0.5033112582781457, "acc_norm_stderr": 0.04082393379449654 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8752293577981651, "acc_stderr": 0.014168298359156327, "acc_norm": 0.8752293577981651, "acc_norm_stderr": 0.014168298359156327 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.6203703703703703, "acc_stderr": 0.03309682581119035, "acc_norm": 0.6203703703703703, "acc_norm_stderr": 0.03309682581119035 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8480392156862745, "acc_stderr": 0.025195658428931792, "acc_norm": 0.8480392156862745, "acc_norm_stderr": 0.025195658428931792 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8523206751054853, "acc_stderr": 0.023094329582595698, "acc_norm": 0.8523206751054853, "acc_norm_stderr": 0.023094329582595698 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.7713004484304933, "acc_stderr": 0.028188240046929203, "acc_norm": 0.7713004484304933, "acc_norm_stderr": 0.028188240046929203 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.816793893129771, "acc_stderr": 0.03392770926494733, "acc_norm": 0.816793893129771, "acc_norm_stderr": 0.03392770926494733 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8429752066115702, "acc_stderr": 0.03321244842547129, "acc_norm": 0.8429752066115702, "acc_norm_stderr": 0.03321244842547129 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8611111111111112, "acc_stderr": 0.03343270062869622, "acc_norm": 0.8611111111111112, "acc_norm_stderr": 0.03343270062869622 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7730061349693251, "acc_stderr": 0.03291099578615769, "acc_norm": 0.7730061349693251, "acc_norm_stderr": 0.03291099578615769 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.45535714285714285, "acc_stderr": 0.047268355537191, "acc_norm": 0.45535714285714285, "acc_norm_stderr": 0.047268355537191 }, "harness|hendrycksTest-management|5": { "acc": 0.8446601941747572, "acc_stderr": 0.035865947385739734, "acc_norm": 0.8446601941747572, "acc_norm_stderr": 0.035865947385739734 }, "harness|hendrycksTest-marketing|5": { "acc": 0.905982905982906, "acc_stderr": 0.019119892798924978, "acc_norm": 0.905982905982906, "acc_norm_stderr": 0.019119892798924978 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.77, "acc_stderr": 0.04229525846816507, "acc_norm": 0.77, "acc_norm_stderr": 0.04229525846816507 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8786717752234994, "acc_stderr": 0.01167591388390672, "acc_norm": 0.8786717752234994, "acc_norm_stderr": 0.01167591388390672 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7687861271676301, "acc_stderr": 0.02269865716785571, "acc_norm": 0.7687861271676301, "acc_norm_stderr": 0.02269865716785571 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.3553072625698324, "acc_stderr": 0.01600698993480319, "acc_norm": 0.3553072625698324, "acc_norm_stderr": 0.01600698993480319 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.8071895424836601, "acc_stderr": 0.022589318888176703, "acc_norm": 0.8071895424836601, "acc_norm_stderr": 0.022589318888176703 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7877813504823151, "acc_stderr": 0.02322275679743511, "acc_norm": 0.7877813504823151, "acc_norm_stderr": 0.02322275679743511 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.8364197530864198, "acc_stderr": 0.020581466138257145, "acc_norm": 0.8364197530864198, "acc_norm_stderr": 0.020581466138257145 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 
0.5283687943262412, "acc_stderr": 0.029779450957303055, "acc_norm": 0.5283687943262412, "acc_norm_stderr": 0.029779450957303055 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.5202086049543677, "acc_stderr": 0.012759801427767552, "acc_norm": 0.5202086049543677, "acc_norm_stderr": 0.012759801427767552 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.7904411764705882, "acc_stderr": 0.02472311040767707, "acc_norm": 0.7904411764705882, "acc_norm_stderr": 0.02472311040767707 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.7663398692810458, "acc_stderr": 0.017119158496044506, "acc_norm": 0.7663398692810458, "acc_norm_stderr": 0.017119158496044506 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.7, "acc_stderr": 0.04389311454644287, "acc_norm": 0.7, "acc_norm_stderr": 0.04389311454644287 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7714285714285715, "acc_stderr": 0.026882144922307744, "acc_norm": 0.7714285714285715, "acc_norm_stderr": 0.026882144922307744 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8855721393034826, "acc_stderr": 0.022509345325101706, "acc_norm": 0.8855721393034826, "acc_norm_stderr": 0.022509345325101706 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.89, "acc_stderr": 0.03144660377352202, "acc_norm": 0.89, "acc_norm_stderr": 0.03144660377352202 }, "harness|hendrycksTest-virology|5": { "acc": 0.5060240963855421, "acc_stderr": 0.03892212195333045, "acc_norm": 0.5060240963855421, "acc_norm_stderr": 0.03892212195333045 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8713450292397661, "acc_stderr": 0.025679342723276894, "acc_norm": 0.8713450292397661, "acc_norm_stderr": 0.025679342723276894 }, "harness|truthfulqa:mc|0": { "mc1": 0.30599755201958384, "mc1_stderr": 0.01613222972815505, "mc2": 0.46858539506441044, "mc2_stderr": 0.014457363907207055 }, "harness|winogrande|5": { "acc": 0.8200473559589582, "acc_stderr": 0.010796468688068684 }, "harness|gsm8k|5": { "acc": 0.0, "acc_stderr": 0.0 } }
```

## Dataset Details

### Dataset Description

<!-- Provide a longer summary of what this dataset is. -->

- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]

### Dataset Sources [optional]

<!-- Provide the basic links for the dataset. -->

- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]

## Uses

<!-- Address questions around how the dataset is intended to be used. -->

### Direct Use

<!-- This section describes suitable use cases for the dataset. -->

[More Information Needed]

### Out-of-Scope Use

<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->

[More Information Needed]

## Dataset Structure

<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->

[More Information Needed]

## Dataset Creation

### Curation Rationale

<!-- Motivation for the creation of this dataset. -->

[More Information Needed]

### Source Data

<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...)
-->

#### Data Collection and Processing

<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->

[More Information Needed]

#### Who are the source data producers?

<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->

[More Information Needed]

### Annotations [optional]

<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->

#### Annotation process

<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->

[More Information Needed]

#### Who are the annotators?

<!-- This section describes the people or systems who created the annotations. -->

[More Information Needed]

#### Personal and Sensitive Information

<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->

[More Information Needed]

## Bias, Risks, and Limitations

<!-- This section is meant to convey both technical and sociotechnical limitations. -->

[More Information Needed]

### Recommendations

<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->

Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.

## Citation [optional]

<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->

**BibTeX:**

[More Information Needed]

**APA:**

[More Information Needed]

## Glossary [optional]

<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->

[More Information Needed]

## More Information [optional]

[More Information Needed]

## Dataset Card Authors [optional]

[More Information Needed]

## Dataset Card Contact

[More Information Needed]
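In addition to `load_dataset`, the aggregated metrics shown under "Latest results" can be read straight from the results file linked in that section. Below is a minimal sketch, assuming only `huggingface_hub` is installed; the top-level key handling is guarded, since only the metrics excerpt above is shown in this card and the exact file schema is an assumption:

```python
import json

from huggingface_hub import hf_hub_download

# fetch the aggregated results file referenced in the "Latest results" section
results_file = hf_hub_download(
    repo_id='open-llm-leaderboard/details_hfl__chinese-mixtral',
    repo_type='dataset',
    filename='results_2024-02-04T20-55-39.377397.json',
)

with open(results_file) as f:
    results = json.load(f)

# the metrics may sit under a "results" key or at the top level;
# guarding both, since the exact schema is an assumption here
metrics = results.get('results', results)
overall = metrics.get('all', {})
print('acc:', overall.get('acc'))
print('acc_norm:', overall.get('acc_norm'))
```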
open-llm-leaderboard/details_hfl__chinese-mixtral
[ "region:us" ]
2024-02-02T07:45:29+00:00
{"pretty_name": "Evaluation run of hfl/chinese-mixtral", "dataset_summary": "Dataset automatically created during the evaluation run of model [hfl/chinese-mixtral](https://huggingface.co/hfl/chinese-mixtral) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_hfl__chinese-mixtral\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-04T20:55:39.377397](https://huggingface.co/datasets/open-llm-leaderboard/details_hfl__chinese-mixtral/blob/main/results_2024-02-04T20-55-39.377397.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6923714264378582,\n \"acc_stderr\": 0.03032741436858707,\n \"acc_norm\": 0.7058378526318035,\n \"acc_norm_stderr\": 0.031146777637985557,\n \"mc1\": 0.30599755201958384,\n \"mc1_stderr\": 0.01613222972815505,\n \"mc2\": 0.46858539506441044,\n \"mc2_stderr\": 0.014457363907207055\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6459044368600683,\n \"acc_stderr\": 0.013975454122756567,\n \"acc_norm\": 0.6757679180887372,\n \"acc_norm_stderr\": 0.01367881039951882\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6534554869547898,\n \"acc_stderr\": 0.004748965717214273,\n \"acc_norm\": 0.853415654252141,\n \"acc_norm_stderr\": 0.0035296822858572646\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.39,\n \"acc_stderr\": 0.049020713000019756,\n \"acc_norm\": 0.39,\n \"acc_norm_stderr\": 0.049020713000019756\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6592592592592592,\n \"acc_stderr\": 0.04094376269996793,\n \"acc_norm\": 0.6592592592592592,\n \"acc_norm_stderr\": 0.04094376269996793\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.7763157894736842,\n \"acc_stderr\": 0.03391160934343603,\n \"acc_norm\": 0.7763157894736842,\n \"acc_norm_stderr\": 0.03391160934343603\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7849056603773585,\n \"acc_stderr\": 0.02528839450289137,\n \"acc_norm\": 0.7849056603773585,\n \"acc_norm_stderr\": 0.02528839450289137\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.8472222222222222,\n \"acc_stderr\": 0.030085743248565656,\n \"acc_norm\": 0.8472222222222222,\n \"acc_norm_stderr\": 0.030085743248565656\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.55,\n \"acc_stderr\": 0.049999999999999996,\n \"acc_norm\": 
0.55,\n \"acc_norm_stderr\": 0.049999999999999996\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.58,\n \"acc_stderr\": 0.049604496374885836,\n \"acc_norm\": 0.58,\n \"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.43,\n \"acc_stderr\": 0.049756985195624284,\n \"acc_norm\": 0.43,\n \"acc_norm_stderr\": 0.049756985195624284\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.7109826589595376,\n \"acc_stderr\": 0.034564257450869995,\n \"acc_norm\": 0.7109826589595376,\n \"acc_norm_stderr\": 0.034564257450869995\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.5588235294117647,\n \"acc_stderr\": 0.04940635630605659,\n \"acc_norm\": 0.5588235294117647,\n \"acc_norm_stderr\": 0.04940635630605659\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.81,\n \"acc_stderr\": 0.039427724440366234,\n \"acc_norm\": 0.81,\n \"acc_norm_stderr\": 0.039427724440366234\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.6638297872340425,\n \"acc_stderr\": 0.030881618520676942,\n \"acc_norm\": 0.6638297872340425,\n \"acc_norm_stderr\": 0.030881618520676942\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5964912280701754,\n \"acc_stderr\": 0.04615186962583707,\n \"acc_norm\": 0.5964912280701754,\n \"acc_norm_stderr\": 0.04615186962583707\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.6344827586206897,\n \"acc_stderr\": 0.04013124195424385,\n \"acc_norm\": 0.6344827586206897,\n \"acc_norm_stderr\": 0.04013124195424385\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.455026455026455,\n \"acc_stderr\": 0.025646928361049395,\n \"acc_norm\": 0.455026455026455,\n \"acc_norm_stderr\": 0.025646928361049395\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.04472135954999579,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.04472135954999579\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.47,\n \"acc_stderr\": 0.05016135580465919,\n \"acc_norm\": 0.47,\n \"acc_norm_stderr\": 0.05016135580465919\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.8387096774193549,\n \"acc_stderr\": 0.020923327006423294,\n \"acc_norm\": 0.8387096774193549,\n \"acc_norm_stderr\": 0.020923327006423294\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.6009852216748769,\n \"acc_stderr\": 0.034454876862647144,\n \"acc_norm\": 0.6009852216748769,\n \"acc_norm_stderr\": 0.034454876862647144\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.74,\n \"acc_stderr\": 0.04408440022768078,\n \"acc_norm\": 0.74,\n \"acc_norm_stderr\": 0.04408440022768078\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.806060606060606,\n \"acc_stderr\": 0.030874145136562094,\n \"acc_norm\": 0.806060606060606,\n \"acc_norm_stderr\": 0.030874145136562094\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8686868686868687,\n \"acc_stderr\": 0.024063156416822516,\n \"acc_norm\": 0.8686868686868687,\n \"acc_norm_stderr\": 0.024063156416822516\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9326424870466321,\n \"acc_stderr\": 0.018088393839078912,\n \"acc_norm\": 0.9326424870466321,\n \"acc_norm_stderr\": 0.018088393839078912\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6974358974358974,\n \"acc_stderr\": 
0.02329088805377273,\n \"acc_norm\": 0.6974358974358974,\n \"acc_norm_stderr\": 0.02329088805377273\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3333333333333333,\n \"acc_stderr\": 0.028742040903948485,\n \"acc_norm\": 0.3333333333333333,\n \"acc_norm_stderr\": 0.028742040903948485\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.7857142857142857,\n \"acc_stderr\": 0.02665353159671549,\n \"acc_norm\": 0.7857142857142857,\n \"acc_norm_stderr\": 0.02665353159671549\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.5033112582781457,\n \"acc_stderr\": 0.04082393379449654,\n \"acc_norm\": 0.5033112582781457,\n \"acc_norm_stderr\": 0.04082393379449654\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8752293577981651,\n \"acc_stderr\": 0.014168298359156327,\n \"acc_norm\": 0.8752293577981651,\n \"acc_norm_stderr\": 0.014168298359156327\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.6203703703703703,\n \"acc_stderr\": 0.03309682581119035,\n \"acc_norm\": 0.6203703703703703,\n \"acc_norm_stderr\": 0.03309682581119035\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8480392156862745,\n \"acc_stderr\": 0.025195658428931792,\n \"acc_norm\": 0.8480392156862745,\n \"acc_norm_stderr\": 0.025195658428931792\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8523206751054853,\n \"acc_stderr\": 0.023094329582595698,\n \"acc_norm\": 0.8523206751054853,\n \"acc_norm_stderr\": 0.023094329582595698\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.7713004484304933,\n \"acc_stderr\": 0.028188240046929203,\n \"acc_norm\": 0.7713004484304933,\n \"acc_norm_stderr\": 0.028188240046929203\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.816793893129771,\n \"acc_stderr\": 0.03392770926494733,\n \"acc_norm\": 0.816793893129771,\n \"acc_norm_stderr\": 0.03392770926494733\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8429752066115702,\n \"acc_stderr\": 0.03321244842547129,\n \"acc_norm\": 0.8429752066115702,\n \"acc_norm_stderr\": 0.03321244842547129\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8611111111111112,\n \"acc_stderr\": 0.03343270062869622,\n \"acc_norm\": 0.8611111111111112,\n \"acc_norm_stderr\": 0.03343270062869622\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7730061349693251,\n \"acc_stderr\": 0.03291099578615769,\n \"acc_norm\": 0.7730061349693251,\n \"acc_norm_stderr\": 0.03291099578615769\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.45535714285714285,\n \"acc_stderr\": 0.047268355537191,\n \"acc_norm\": 0.45535714285714285,\n \"acc_norm_stderr\": 0.047268355537191\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8446601941747572,\n \"acc_stderr\": 0.035865947385739734,\n \"acc_norm\": 0.8446601941747572,\n \"acc_norm_stderr\": 0.035865947385739734\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.905982905982906,\n \"acc_stderr\": 0.019119892798924978,\n \"acc_norm\": 0.905982905982906,\n \"acc_norm_stderr\": 0.019119892798924978\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.77,\n \"acc_stderr\": 0.04229525846816507,\n \"acc_norm\": 0.77,\n \"acc_norm_stderr\": 0.04229525846816507\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8786717752234994,\n \"acc_stderr\": 0.01167591388390672,\n \"acc_norm\": 0.8786717752234994,\n \"acc_norm_stderr\": 
0.01167591388390672\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7687861271676301,\n \"acc_stderr\": 0.02269865716785571,\n \"acc_norm\": 0.7687861271676301,\n \"acc_norm_stderr\": 0.02269865716785571\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.3553072625698324,\n \"acc_stderr\": 0.01600698993480319,\n \"acc_norm\": 0.3553072625698324,\n \"acc_norm_stderr\": 0.01600698993480319\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.8071895424836601,\n \"acc_stderr\": 0.022589318888176703,\n \"acc_norm\": 0.8071895424836601,\n \"acc_norm_stderr\": 0.022589318888176703\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7877813504823151,\n \"acc_stderr\": 0.02322275679743511,\n \"acc_norm\": 0.7877813504823151,\n \"acc_norm_stderr\": 0.02322275679743511\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.8364197530864198,\n \"acc_stderr\": 0.020581466138257145,\n \"acc_norm\": 0.8364197530864198,\n \"acc_norm_stderr\": 0.020581466138257145\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.5283687943262412,\n \"acc_stderr\": 0.029779450957303055,\n \"acc_norm\": 0.5283687943262412,\n \"acc_norm_stderr\": 0.029779450957303055\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.5202086049543677,\n \"acc_stderr\": 0.012759801427767552,\n \"acc_norm\": 0.5202086049543677,\n \"acc_norm_stderr\": 0.012759801427767552\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.7904411764705882,\n \"acc_stderr\": 0.02472311040767707,\n \"acc_norm\": 0.7904411764705882,\n \"acc_norm_stderr\": 0.02472311040767707\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.7663398692810458,\n \"acc_stderr\": 0.017119158496044506,\n \"acc_norm\": 0.7663398692810458,\n \"acc_norm_stderr\": 0.017119158496044506\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.04389311454644287,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.04389311454644287\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7714285714285715,\n \"acc_stderr\": 0.026882144922307744,\n \"acc_norm\": 0.7714285714285715,\n \"acc_norm_stderr\": 0.026882144922307744\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8855721393034826,\n \"acc_stderr\": 0.022509345325101706,\n \"acc_norm\": 0.8855721393034826,\n \"acc_norm_stderr\": 0.022509345325101706\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.89,\n \"acc_stderr\": 0.03144660377352202,\n \"acc_norm\": 0.89,\n \"acc_norm_stderr\": 0.03144660377352202\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5060240963855421,\n \"acc_stderr\": 0.03892212195333045,\n \"acc_norm\": 0.5060240963855421,\n \"acc_norm_stderr\": 0.03892212195333045\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8713450292397661,\n \"acc_stderr\": 0.025679342723276894,\n \"acc_norm\": 0.8713450292397661,\n \"acc_norm_stderr\": 0.025679342723276894\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.30599755201958384,\n \"mc1_stderr\": 0.01613222972815505,\n \"mc2\": 0.46858539506441044,\n \"mc2_stderr\": 0.014457363907207055\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8200473559589582,\n \"acc_stderr\": 0.010796468688068684\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0,\n \"acc_stderr\": 0.0\n }\n}\n```", "repo_url": "https://huggingface.co/hfl/chinese-mixtral", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", 
"point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_02T07_43_13.375252", "path": ["**/details_harness|arc:challenge|25_2024-02-02T07-43-13.375252.parquet"]}, {"split": "2024_02_04T20_55_39.377397", "path": ["**/details_harness|arc:challenge|25_2024-02-04T20-55-39.377397.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-04T20-55-39.377397.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_02T07_43_13.375252", "path": ["**/details_harness|gsm8k|5_2024-02-02T07-43-13.375252.parquet"]}, {"split": "2024_02_04T20_55_39.377397", "path": ["**/details_harness|gsm8k|5_2024-02-04T20-55-39.377397.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-04T20-55-39.377397.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_02T07_43_13.375252", "path": ["**/details_harness|hellaswag|10_2024-02-02T07-43-13.375252.parquet"]}, {"split": "2024_02_04T20_55_39.377397", "path": ["**/details_harness|hellaswag|10_2024-02-04T20-55-39.377397.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-04T20-55-39.377397.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_02T07_43_13.375252", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T07-43-13.375252.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T07-43-13.375252.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T07-43-13.375252.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T07-43-13.375252.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T07-43-13.375252.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T07-43-13.375252.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T07-43-13.375252.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T07-43-13.375252.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T07-43-13.375252.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T07-43-13.375252.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T07-43-13.375252.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T07-43-13.375252.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T07-43-13.375252.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T07-43-13.375252.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T07-43-13.375252.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T07-43-13.375252.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T07-43-13.375252.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T07-43-13.375252.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T07-43-13.375252.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T07-43-13.375252.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T07-43-13.375252.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T07-43-13.375252.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T07-43-13.375252.parquet", 
"**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T07-43-13.375252.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T07-43-13.375252.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T07-43-13.375252.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T07-43-13.375252.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T07-43-13.375252.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T07-43-13.375252.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T07-43-13.375252.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T07-43-13.375252.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T07-43-13.375252.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T07-43-13.375252.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T07-43-13.375252.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T07-43-13.375252.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T07-43-13.375252.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T07-43-13.375252.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T07-43-13.375252.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T07-43-13.375252.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T07-43-13.375252.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T07-43-13.375252.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T07-43-13.375252.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T07-43-13.375252.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T07-43-13.375252.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T07-43-13.375252.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-02T07-43-13.375252.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-02T07-43-13.375252.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T07-43-13.375252.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T07-43-13.375252.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T07-43-13.375252.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T07-43-13.375252.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T07-43-13.375252.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T07-43-13.375252.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T07-43-13.375252.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T07-43-13.375252.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T07-43-13.375252.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T07-43-13.375252.parquet"]}, {"split": "2024_02_04T20_55_39.377397", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-04T20-55-39.377397.parquet", 
"**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-04T20-55-39.377397.parquet", 
"**/details_harness|hendrycksTest-moral_disputes|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-04T20-55-39.377397.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-04T20-55-39.377397.parquet", 
"**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-04T20-55-39.377397.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-04T20-55-39.377397.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_02T07_43_13.375252", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T07-43-13.375252.parquet"]}, {"split": "2024_02_04T20_55_39.377397", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-04T20-55-39.377397.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-04T20-55-39.377397.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_02T07_43_13.375252", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T07-43-13.375252.parquet"]}, {"split": "2024_02_04T20_55_39.377397", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-04T20-55-39.377397.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-04T20-55-39.377397.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_02T07_43_13.375252", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T07-43-13.375252.parquet"]}, {"split": "2024_02_04T20_55_39.377397", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-04T20-55-39.377397.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-04T20-55-39.377397.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_02T07_43_13.375252", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T07-43-13.375252.parquet"]}, {"split": "2024_02_04T20_55_39.377397", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-04T20-55-39.377397.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-04T20-55-39.377397.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_02T07_43_13.375252", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T07-43-13.375252.parquet"]}, {"split": "2024_02_04T20_55_39.377397", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-04T20-55-39.377397.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-04T20-55-39.377397.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_02T07_43_13.375252", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T07-43-13.375252.parquet"]}, {"split": "2024_02_04T20_55_39.377397", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-04T20-55-39.377397.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-04T20-55-39.377397.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_02T07_43_13.375252", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T07-43-13.375252.parquet"]}, {"split": "2024_02_04T20_55_39.377397", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-04T20-55-39.377397.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-04T20-55-39.377397.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_02_02T07_43_13.375252", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T07-43-13.375252.parquet"]}, {"split": "2024_02_04T20_55_39.377397", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-04T20-55-39.377397.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-04T20-55-39.377397.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_02T07_43_13.375252", "path": 
["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T07-43-13.375252.parquet"]}, {"split": "2024_02_04T20_55_39.377397", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-04T20-55-39.377397.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-04T20-55-39.377397.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_02T07_43_13.375252", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T07-43-13.375252.parquet"]}, {"split": "2024_02_04T20_55_39.377397", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-04T20-55-39.377397.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-04T20-55-39.377397.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_02T07_43_13.375252", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T07-43-13.375252.parquet"]}, {"split": "2024_02_04T20_55_39.377397", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-04T20-55-39.377397.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-04T20-55-39.377397.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_02T07_43_13.375252", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T07-43-13.375252.parquet"]}, {"split": "2024_02_04T20_55_39.377397", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-04T20-55-39.377397.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-04T20-55-39.377397.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_02T07_43_13.375252", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T07-43-13.375252.parquet"]}, {"split": "2024_02_04T20_55_39.377397", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-04T20-55-39.377397.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-04T20-55-39.377397.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_02T07_43_13.375252", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T07-43-13.375252.parquet"]}, {"split": "2024_02_04T20_55_39.377397", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-04T20-55-39.377397.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-04T20-55-39.377397.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_02T07_43_13.375252", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T07-43-13.375252.parquet"]}, {"split": "2024_02_04T20_55_39.377397", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-04T20-55-39.377397.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-04T20-55-39.377397.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_02T07_43_13.375252", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T07-43-13.375252.parquet"]}, {"split": "2024_02_04T20_55_39.377397", "path": 
["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-04T20-55-39.377397.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-04T20-55-39.377397.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_02T07_43_13.375252", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T07-43-13.375252.parquet"]}, {"split": "2024_02_04T20_55_39.377397", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-04T20-55-39.377397.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-04T20-55-39.377397.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_02T07_43_13.375252", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T07-43-13.375252.parquet"]}, {"split": "2024_02_04T20_55_39.377397", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-04T20-55-39.377397.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-04T20-55-39.377397.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_02T07_43_13.375252", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T07-43-13.375252.parquet"]}, {"split": "2024_02_04T20_55_39.377397", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-04T20-55-39.377397.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-04T20-55-39.377397.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_02T07_43_13.375252", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T07-43-13.375252.parquet"]}, {"split": "2024_02_04T20_55_39.377397", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-04T20-55-39.377397.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-04T20-55-39.377397.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_02T07_43_13.375252", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T07-43-13.375252.parquet"]}, {"split": "2024_02_04T20_55_39.377397", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-04T20-55-39.377397.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-04T20-55-39.377397.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_02T07_43_13.375252", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T07-43-13.375252.parquet"]}, {"split": "2024_02_04T20_55_39.377397", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-04T20-55-39.377397.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-04T20-55-39.377397.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_02T07_43_13.375252", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T07-43-13.375252.parquet"]}, {"split": "2024_02_04T20_55_39.377397", "path": 
["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-04T20-55-39.377397.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-04T20-55-39.377397.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_02T07_43_13.375252", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T07-43-13.375252.parquet"]}, {"split": "2024_02_04T20_55_39.377397", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-04T20-55-39.377397.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-04T20-55-39.377397.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_02T07_43_13.375252", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T07-43-13.375252.parquet"]}, {"split": "2024_02_04T20_55_39.377397", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-04T20-55-39.377397.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-04T20-55-39.377397.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_02T07_43_13.375252", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T07-43-13.375252.parquet"]}, {"split": "2024_02_04T20_55_39.377397", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-04T20-55-39.377397.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-04T20-55-39.377397.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_02T07_43_13.375252", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T07-43-13.375252.parquet"]}, {"split": "2024_02_04T20_55_39.377397", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-04T20-55-39.377397.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-04T20-55-39.377397.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_02T07_43_13.375252", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T07-43-13.375252.parquet"]}, {"split": "2024_02_04T20_55_39.377397", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-04T20-55-39.377397.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-04T20-55-39.377397.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_02T07_43_13.375252", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T07-43-13.375252.parquet"]}, {"split": "2024_02_04T20_55_39.377397", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-04T20-55-39.377397.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-04T20-55-39.377397.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_02T07_43_13.375252", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T07-43-13.375252.parquet"]}, 
{"split": "2024_02_04T20_55_39.377397", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-04T20-55-39.377397.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-04T20-55-39.377397.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_02T07_43_13.375252", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T07-43-13.375252.parquet"]}, {"split": "2024_02_04T20_55_39.377397", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-04T20-55-39.377397.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-04T20-55-39.377397.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_02T07_43_13.375252", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T07-43-13.375252.parquet"]}, {"split": "2024_02_04T20_55_39.377397", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-04T20-55-39.377397.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-04T20-55-39.377397.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_02T07_43_13.375252", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T07-43-13.375252.parquet"]}, {"split": "2024_02_04T20_55_39.377397", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-04T20-55-39.377397.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-04T20-55-39.377397.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_02T07_43_13.375252", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T07-43-13.375252.parquet"]}, {"split": "2024_02_04T20_55_39.377397", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-04T20-55-39.377397.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-04T20-55-39.377397.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_02T07_43_13.375252", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T07-43-13.375252.parquet"]}, {"split": "2024_02_04T20_55_39.377397", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-04T20-55-39.377397.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-04T20-55-39.377397.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_02T07_43_13.375252", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T07-43-13.375252.parquet"]}, {"split": "2024_02_04T20_55_39.377397", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-04T20-55-39.377397.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-04T20-55-39.377397.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_02T07_43_13.375252", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T07-43-13.375252.parquet"]}, {"split": "2024_02_04T20_55_39.377397", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-04T20-55-39.377397.parquet"]}, {"split": 
"latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-04T20-55-39.377397.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_02_02T07_43_13.375252", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T07-43-13.375252.parquet"]}, {"split": "2024_02_04T20_55_39.377397", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-04T20-55-39.377397.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-04T20-55-39.377397.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_02T07_43_13.375252", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T07-43-13.375252.parquet"]}, {"split": "2024_02_04T20_55_39.377397", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-04T20-55-39.377397.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-04T20-55-39.377397.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_02T07_43_13.375252", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T07-43-13.375252.parquet"]}, {"split": "2024_02_04T20_55_39.377397", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-04T20-55-39.377397.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-04T20-55-39.377397.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_02T07_43_13.375252", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T07-43-13.375252.parquet"]}, {"split": "2024_02_04T20_55_39.377397", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-04T20-55-39.377397.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-04T20-55-39.377397.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_02T07_43_13.375252", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T07-43-13.375252.parquet"]}, {"split": "2024_02_04T20_55_39.377397", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-04T20-55-39.377397.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-04T20-55-39.377397.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_02T07_43_13.375252", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T07-43-13.375252.parquet"]}, {"split": "2024_02_04T20_55_39.377397", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-04T20-55-39.377397.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-04T20-55-39.377397.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_02T07_43_13.375252", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T07-43-13.375252.parquet"]}, {"split": "2024_02_04T20_55_39.377397", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-04T20-55-39.377397.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-04T20-55-39.377397.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_02T07_43_13.375252", "path": 
["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T07-43-13.375252.parquet"]}, {"split": "2024_02_04T20_55_39.377397", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-04T20-55-39.377397.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-04T20-55-39.377397.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_02T07_43_13.375252", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T07-43-13.375252.parquet"]}, {"split": "2024_02_04T20_55_39.377397", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-04T20-55-39.377397.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-04T20-55-39.377397.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_02T07_43_13.375252", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T07-43-13.375252.parquet"]}, {"split": "2024_02_04T20_55_39.377397", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-04T20-55-39.377397.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-04T20-55-39.377397.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_02T07_43_13.375252", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T07-43-13.375252.parquet"]}, {"split": "2024_02_04T20_55_39.377397", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-04T20-55-39.377397.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-04T20-55-39.377397.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_02T07_43_13.375252", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T07-43-13.375252.parquet"]}, {"split": "2024_02_04T20_55_39.377397", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-04T20-55-39.377397.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-04T20-55-39.377397.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_02T07_43_13.375252", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T07-43-13.375252.parquet"]}, {"split": "2024_02_04T20_55_39.377397", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-04T20-55-39.377397.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-04T20-55-39.377397.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_02T07_43_13.375252", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T07-43-13.375252.parquet"]}, {"split": "2024_02_04T20_55_39.377397", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-04T20-55-39.377397.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-04T20-55-39.377397.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_02T07_43_13.375252", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T07-43-13.375252.parquet"]}, {"split": "2024_02_04T20_55_39.377397", "path": 
["**/details_harness|hendrycksTest-public_relations|5_2024-02-04T20-55-39.377397.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-04T20-55-39.377397.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_02T07_43_13.375252", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T07-43-13.375252.parquet"]}, {"split": "2024_02_04T20_55_39.377397", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-04T20-55-39.377397.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-04T20-55-39.377397.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_02T07_43_13.375252", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T07-43-13.375252.parquet"]}, {"split": "2024_02_04T20_55_39.377397", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-04T20-55-39.377397.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-04T20-55-39.377397.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_02T07_43_13.375252", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T07-43-13.375252.parquet"]}, {"split": "2024_02_04T20_55_39.377397", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-04T20-55-39.377397.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-04T20-55-39.377397.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_02T07_43_13.375252", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T07-43-13.375252.parquet"]}, {"split": "2024_02_04T20_55_39.377397", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-04T20-55-39.377397.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-04T20-55-39.377397.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_02T07_43_13.375252", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T07-43-13.375252.parquet"]}, {"split": "2024_02_04T20_55_39.377397", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-04T20-55-39.377397.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-04T20-55-39.377397.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_02T07_43_13.375252", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T07-43-13.375252.parquet"]}, {"split": "2024_02_04T20_55_39.377397", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-04T20-55-39.377397.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-04T20-55-39.377397.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_02T07_43_13.375252", "path": ["**/details_harness|winogrande|5_2024-02-02T07-43-13.375252.parquet"]}, {"split": "2024_02_04T20_55_39.377397", "path": ["**/details_harness|winogrande|5_2024-02-04T20-55-39.377397.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-02-04T20-55-39.377397.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_02_02T07_43_13.375252", "path": ["results_2024-02-02T07-43-13.375252.parquet"]}, {"split": "2024_02_04T20_55_39.377397", "path": 
["results_2024-02-04T20-55-39.377397.parquet"]}, {"split": "latest", "path": ["results_2024-02-04T20-55-39.377397.parquet"]}]}]}
2024-02-04T20:58:22+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of hfl/chinese-mixtral Dataset automatically created during the evaluation run of model hfl/chinese-mixtral on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-02-04T20:55:39.377397 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
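The flattened card above says "you can for instance do the following:" but the text extraction dropped the accompanying snippet; a hedged reconstruction, following the pattern of the un-flattened cards in this dump (the repository id is assumed from that same pattern):

```python
from datasets import load_dataset

# Repository id assumed from the leaderboard's "details_<org>__<model>" pattern.
data = load_dataset("open-llm-leaderboard/details_hfl__chinese-mixtral",
                    "harness_winogrande_5",
                    split="train")  # per the card, "train" tracks the latest run
```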
[ "# Dataset Card for Evaluation run of hfl/chinese-mixtral\n\n\n\nDataset automatically created during the evaluation run of model hfl/chinese-mixtral on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-04T20:55:39.377397(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of hfl/chinese-mixtral\n\n\n\nDataset automatically created during the evaluation run of model hfl/chinese-mixtral on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-04T20:55:39.377397(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
825491e55eb5daa49b6a83d2fbe0bfa41bcfb1a6
# Dataset Card for Evaluation run of hfl/chinese-mixtral-instruct <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [hfl/chinese-mixtral-instruct](https://huggingface.co/hfl/chinese-mixtral-instruct) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_hfl__chinese-mixtral-instruct", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-02-02T07:46:52.047789](https://huggingface.co/datasets/open-llm-leaderboard/details_hfl__chinese-mixtral-instruct/blob/main/results_2024-02-02T07-46-52.047789.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.7125789212790726, "acc_stderr": 0.03028360858938214, "acc_norm": 0.7170118110932137, "acc_norm_stderr": 0.030873801474279663, "mc1": 0.4173806609547124, "mc1_stderr": 0.01726289106327218, "mc2": 0.5746085023184442, "mc2_stderr": 0.015092112041318098 }, "harness|arc:challenge|25": { "acc": 0.6459044368600683, "acc_stderr": 0.013975454122756564, "acc_norm": 0.6774744027303754, "acc_norm_stderr": 0.013659980894277368 }, "harness|hellaswag|10": { "acc": 0.6643098984266083, "acc_stderr": 0.004712660409846844, "acc_norm": 0.8567018522206732, "acc_norm_stderr": 0.0034966056729606866 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.37, "acc_stderr": 0.04852365870939099, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939099 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6962962962962963, "acc_stderr": 0.03972552884785136, "acc_norm": 0.6962962962962963, "acc_norm_stderr": 0.03972552884785136 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.8289473684210527, "acc_stderr": 0.030643607071677098, "acc_norm": 0.8289473684210527, "acc_norm_stderr": 0.030643607071677098 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.75, "acc_stderr": 0.04351941398892446, "acc_norm": 0.75, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7886792452830189, "acc_stderr": 0.025125766484827845, "acc_norm": 0.7886792452830189, "acc_norm_stderr": 0.025125766484827845 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.8333333333333334, "acc_stderr": 0.031164899666948617, "acc_norm": 0.8333333333333334, "acc_norm_stderr": 0.031164899666948617 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.54, "acc_stderr": 0.05009082659620333, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620333 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.63, "acc_stderr": 0.048523658709391, "acc_norm": 0.63, "acc_norm_stderr":
0.048523658709391 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.42, "acc_stderr": 0.04960449637488584, "acc_norm": 0.42, "acc_norm_stderr": 0.04960449637488584 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.7109826589595376, "acc_stderr": 0.03456425745086999, "acc_norm": 0.7109826589595376, "acc_norm_stderr": 0.03456425745086999 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.5, "acc_stderr": 0.04975185951049946, "acc_norm": 0.5, "acc_norm_stderr": 0.04975185951049946 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.78, "acc_stderr": 0.04163331998932263, "acc_norm": 0.78, "acc_norm_stderr": 0.04163331998932263 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.676595744680851, "acc_stderr": 0.030579442773610337, "acc_norm": 0.676595744680851, "acc_norm_stderr": 0.030579442773610337 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.6140350877192983, "acc_stderr": 0.04579639422070435, "acc_norm": 0.6140350877192983, "acc_norm_stderr": 0.04579639422070435 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.6620689655172414, "acc_stderr": 0.039417076320648906, "acc_norm": 0.6620689655172414, "acc_norm_stderr": 0.039417076320648906 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.5026455026455027, "acc_stderr": 0.025750949678130387, "acc_norm": 0.5026455026455027, "acc_norm_stderr": 0.025750949678130387 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.5952380952380952, "acc_stderr": 0.04390259265377562, "acc_norm": 0.5952380952380952, "acc_norm_stderr": 0.04390259265377562 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.51, "acc_stderr": 0.05024183937956912, "acc_norm": 0.51, "acc_norm_stderr": 0.05024183937956912 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.8096774193548387, "acc_stderr": 0.02233170761182307, "acc_norm": 0.8096774193548387, "acc_norm_stderr": 0.02233170761182307 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.6502463054187192, "acc_stderr": 0.03355400904969566, "acc_norm": 0.6502463054187192, "acc_norm_stderr": 0.03355400904969566 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.73, "acc_stderr": 0.044619604333847394, "acc_norm": 0.73, "acc_norm_stderr": 0.044619604333847394 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.8121212121212121, "acc_stderr": 0.03050193405942914, "acc_norm": 0.8121212121212121, "acc_norm_stderr": 0.03050193405942914 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8636363636363636, "acc_stderr": 0.024450155973189835, "acc_norm": 0.8636363636363636, "acc_norm_stderr": 0.024450155973189835 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9481865284974094, "acc_stderr": 0.01599622932024412, "acc_norm": 0.9481865284974094, "acc_norm_stderr": 0.01599622932024412 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.7102564102564103, "acc_stderr": 0.02300062824368797, "acc_norm": 0.7102564102564103, "acc_norm_stderr": 0.02300062824368797 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.4, "acc_stderr": 0.029869605095316904, "acc_norm": 0.4, "acc_norm_stderr": 0.029869605095316904 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.7815126050420168, "acc_stderr": 0.02684151432295893, "acc_norm": 0.7815126050420168, "acc_norm_stderr": 0.02684151432295893 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.4900662251655629, "acc_stderr": 0.04081677107248436, "acc_norm": 0.4900662251655629, 
"acc_norm_stderr": 0.04081677107248436 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8697247706422019, "acc_stderr": 0.014431862852473259, "acc_norm": 0.8697247706422019, "acc_norm_stderr": 0.014431862852473259 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.6203703703703703, "acc_stderr": 0.03309682581119035, "acc_norm": 0.6203703703703703, "acc_norm_stderr": 0.03309682581119035 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8676470588235294, "acc_stderr": 0.02378429752091885, "acc_norm": 0.8676470588235294, "acc_norm_stderr": 0.02378429752091885 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8987341772151899, "acc_stderr": 0.019637720526065515, "acc_norm": 0.8987341772151899, "acc_norm_stderr": 0.019637720526065515 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.7443946188340808, "acc_stderr": 0.029275891003969927, "acc_norm": 0.7443946188340808, "acc_norm_stderr": 0.029275891003969927 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.8244274809160306, "acc_stderr": 0.03336820338476074, "acc_norm": 0.8244274809160306, "acc_norm_stderr": 0.03336820338476074 }, "harness|hendrycksTest-international_law|5": { "acc": 0.859504132231405, "acc_stderr": 0.031722334260021585, "acc_norm": 0.859504132231405, "acc_norm_stderr": 0.031722334260021585 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8425925925925926, "acc_stderr": 0.03520703990517963, "acc_norm": 0.8425925925925926, "acc_norm_stderr": 0.03520703990517963 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7914110429447853, "acc_stderr": 0.031921934489347235, "acc_norm": 0.7914110429447853, "acc_norm_stderr": 0.031921934489347235 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4642857142857143, "acc_stderr": 0.04733667890053756, "acc_norm": 0.4642857142857143, "acc_norm_stderr": 0.04733667890053756 }, "harness|hendrycksTest-management|5": { "acc": 0.8446601941747572, "acc_stderr": 0.03586594738573974, "acc_norm": 0.8446601941747572, "acc_norm_stderr": 0.03586594738573974 }, "harness|hendrycksTest-marketing|5": { "acc": 0.905982905982906, "acc_stderr": 0.019119892798924978, "acc_norm": 0.905982905982906, "acc_norm_stderr": 0.019119892798924978 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.82, "acc_stderr": 0.03861229196653694, "acc_norm": 0.82, "acc_norm_stderr": 0.03861229196653694 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8914431673052363, "acc_stderr": 0.011124283175851199, "acc_norm": 0.8914431673052363, "acc_norm_stderr": 0.011124283175851199 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7630057803468208, "acc_stderr": 0.02289408248992599, "acc_norm": 0.7630057803468208, "acc_norm_stderr": 0.02289408248992599 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.464804469273743, "acc_stderr": 0.016681020931076655, "acc_norm": 0.464804469273743, "acc_norm_stderr": 0.016681020931076655 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7875816993464052, "acc_stderr": 0.023420375478296136, "acc_norm": 0.7875816993464052, "acc_norm_stderr": 0.023420375478296136 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7813504823151125, "acc_stderr": 0.02347558141786111, "acc_norm": 0.7813504823151125, "acc_norm_stderr": 0.02347558141786111 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.8518518518518519, "acc_stderr": 0.019766459563597252, "acc_norm": 0.8518518518518519, "acc_norm_stderr": 0.019766459563597252 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.5602836879432624, 
"acc_stderr": 0.029609912075594113, "acc_norm": 0.5602836879432624, "acc_norm_stderr": 0.029609912075594113 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.5345501955671447, "acc_stderr": 0.012739711554045713, "acc_norm": 0.5345501955671447, "acc_norm_stderr": 0.012739711554045713 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.7573529411764706, "acc_stderr": 0.026040662474201257, "acc_norm": 0.7573529411764706, "acc_norm_stderr": 0.026040662474201257 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.7679738562091504, "acc_stderr": 0.017077373377856926, "acc_norm": 0.7679738562091504, "acc_norm_stderr": 0.017077373377856926 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.7090909090909091, "acc_stderr": 0.04350271442923243, "acc_norm": 0.7090909090909091, "acc_norm_stderr": 0.04350271442923243 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.8040816326530612, "acc_stderr": 0.025409301953225678, "acc_norm": 0.8040816326530612, "acc_norm_stderr": 0.025409301953225678 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8606965174129353, "acc_stderr": 0.024484487162913973, "acc_norm": 0.8606965174129353, "acc_norm_stderr": 0.024484487162913973 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.9, "acc_stderr": 0.030151134457776334, "acc_norm": 0.9, "acc_norm_stderr": 0.030151134457776334 }, "harness|hendrycksTest-virology|5": { "acc": 0.5, "acc_stderr": 0.03892494720807614, "acc_norm": 0.5, "acc_norm_stderr": 0.03892494720807614 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8771929824561403, "acc_stderr": 0.025172984350155747, "acc_norm": 0.8771929824561403, "acc_norm_stderr": 0.025172984350155747 }, "harness|truthfulqa:mc|0": { "mc1": 0.4173806609547124, "mc1_stderr": 0.01726289106327218, "mc2": 0.5746085023184442, "mc2_stderr": 0.015092112041318098 }, "harness|winogrande|5": { "acc": 0.8310970797158642, "acc_stderr": 0.010529981411838899 }, "harness|gsm8k|5": { "acc": 0.5564821834723275, "acc_stderr": 0.013684327592606165 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
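The card also describes an aggregated "results" configuration. A short usage sketch for this record's repository (the id is taken from the record's id field below; the split layout is assumed to match the sibling record's metadata above, i.e. one timestamped split per run plus "latest"):

```python
from datasets import load_dataset

# "results" holds the aggregated metrics; split layout assumed to mirror the
# per-task configs (timestamped splits plus "latest").
results = load_dataset("open-llm-leaderboard/details_hfl__chinese-mixtral-instruct",
                       "results",
                       split="latest")
print(results[0])  # aggregated accuracies for the newest evaluation run
```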
open-llm-leaderboard/details_hfl__chinese-mixtral-instruct
[ "region:us" ]
2024-02-02T07:49:07+00:00
{"pretty_name": "Evaluation run of hfl/chinese-mixtral-instruct", "dataset_summary": "Dataset automatically created during the evaluation run of model [hfl/chinese-mixtral-instruct](https://huggingface.co/hfl/chinese-mixtral-instruct) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_hfl__chinese-mixtral-instruct\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-02T07:46:52.047789](https://huggingface.co/datasets/open-llm-leaderboard/details_hfl__chinese-mixtral-instruct/blob/main/results_2024-02-02T07-46-52.047789.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.7125789212790726,\n \"acc_stderr\": 0.03028360858938214,\n \"acc_norm\": 0.7170118110932137,\n \"acc_norm_stderr\": 0.030873801474279663,\n \"mc1\": 0.4173806609547124,\n \"mc1_stderr\": 0.01726289106327218,\n \"mc2\": 0.5746085023184442,\n \"mc2_stderr\": 0.015092112041318098\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6459044368600683,\n \"acc_stderr\": 0.013975454122756564,\n \"acc_norm\": 0.6774744027303754,\n \"acc_norm_stderr\": 0.013659980894277368\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6643098984266083,\n \"acc_stderr\": 0.004712660409846844,\n \"acc_norm\": 0.8567018522206732,\n \"acc_norm_stderr\": 0.0034966056729606866\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.37,\n \"acc_stderr\": 0.04852365870939099,\n \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.04852365870939099\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6962962962962963,\n \"acc_stderr\": 0.03972552884785136,\n \"acc_norm\": 0.6962962962962963,\n \"acc_norm_stderr\": 0.03972552884785136\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.8289473684210527,\n \"acc_stderr\": 0.030643607071677098,\n \"acc_norm\": 0.8289473684210527,\n \"acc_norm_stderr\": 0.030643607071677098\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7886792452830189,\n \"acc_stderr\": 0.025125766484827845,\n \"acc_norm\": 0.7886792452830189,\n \"acc_norm_stderr\": 0.025125766484827845\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.8333333333333334,\n \"acc_stderr\": 0.031164899666948617,\n \"acc_norm\": 0.8333333333333334,\n \"acc_norm_stderr\": 0.031164899666948617\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.54,\n 
\"acc_stderr\": 0.05009082659620333,\n \"acc_norm\": 0.54,\n \"acc_norm_stderr\": 0.05009082659620333\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.63,\n \"acc_stderr\": 0.048523658709391,\n \"acc_norm\": 0.63,\n \"acc_norm_stderr\": 0.048523658709391\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.42,\n \"acc_stderr\": 0.04960449637488584,\n \"acc_norm\": 0.42,\n \"acc_norm_stderr\": 0.04960449637488584\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.7109826589595376,\n \"acc_stderr\": 0.03456425745086999,\n \"acc_norm\": 0.7109826589595376,\n \"acc_norm_stderr\": 0.03456425745086999\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.04975185951049946,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.04975185951049946\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.78,\n \"acc_stderr\": 0.04163331998932263,\n \"acc_norm\": 0.78,\n \"acc_norm_stderr\": 0.04163331998932263\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.676595744680851,\n \"acc_stderr\": 0.030579442773610337,\n \"acc_norm\": 0.676595744680851,\n \"acc_norm_stderr\": 0.030579442773610337\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.6140350877192983,\n \"acc_stderr\": 0.04579639422070435,\n \"acc_norm\": 0.6140350877192983,\n \"acc_norm_stderr\": 0.04579639422070435\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.6620689655172414,\n \"acc_stderr\": 0.039417076320648906,\n \"acc_norm\": 0.6620689655172414,\n \"acc_norm_stderr\": 0.039417076320648906\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.5026455026455027,\n \"acc_stderr\": 0.025750949678130387,\n \"acc_norm\": 0.5026455026455027,\n \"acc_norm_stderr\": 0.025750949678130387\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.5952380952380952,\n \"acc_stderr\": 0.04390259265377562,\n \"acc_norm\": 0.5952380952380952,\n \"acc_norm_stderr\": 0.04390259265377562\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.51,\n \"acc_stderr\": 0.05024183937956912,\n \"acc_norm\": 0.51,\n \"acc_norm_stderr\": 0.05024183937956912\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.8096774193548387,\n \"acc_stderr\": 0.02233170761182307,\n \"acc_norm\": 0.8096774193548387,\n \"acc_norm_stderr\": 0.02233170761182307\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.6502463054187192,\n \"acc_stderr\": 0.03355400904969566,\n \"acc_norm\": 0.6502463054187192,\n \"acc_norm_stderr\": 0.03355400904969566\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.73,\n \"acc_stderr\": 0.044619604333847394,\n \"acc_norm\": 0.73,\n \"acc_norm_stderr\": 0.044619604333847394\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.8121212121212121,\n \"acc_stderr\": 0.03050193405942914,\n \"acc_norm\": 0.8121212121212121,\n \"acc_norm_stderr\": 0.03050193405942914\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8636363636363636,\n \"acc_stderr\": 0.024450155973189835,\n \"acc_norm\": 0.8636363636363636,\n \"acc_norm_stderr\": 0.024450155973189835\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9481865284974094,\n \"acc_stderr\": 0.01599622932024412,\n \"acc_norm\": 0.9481865284974094,\n \"acc_norm_stderr\": 0.01599622932024412\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 
0.7102564102564103,\n \"acc_stderr\": 0.02300062824368797,\n \"acc_norm\": 0.7102564102564103,\n \"acc_norm_stderr\": 0.02300062824368797\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.4,\n \"acc_stderr\": 0.029869605095316904,\n \"acc_norm\": 0.4,\n \"acc_norm_stderr\": 0.029869605095316904\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.7815126050420168,\n \"acc_stderr\": 0.02684151432295893,\n \"acc_norm\": 0.7815126050420168,\n \"acc_norm_stderr\": 0.02684151432295893\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.4900662251655629,\n \"acc_stderr\": 0.04081677107248436,\n \"acc_norm\": 0.4900662251655629,\n \"acc_norm_stderr\": 0.04081677107248436\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8697247706422019,\n \"acc_stderr\": 0.014431862852473259,\n \"acc_norm\": 0.8697247706422019,\n \"acc_norm_stderr\": 0.014431862852473259\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.6203703703703703,\n \"acc_stderr\": 0.03309682581119035,\n \"acc_norm\": 0.6203703703703703,\n \"acc_norm_stderr\": 0.03309682581119035\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8676470588235294,\n \"acc_stderr\": 0.02378429752091885,\n \"acc_norm\": 0.8676470588235294,\n \"acc_norm_stderr\": 0.02378429752091885\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8987341772151899,\n \"acc_stderr\": 0.019637720526065515,\n \"acc_norm\": 0.8987341772151899,\n \"acc_norm_stderr\": 0.019637720526065515\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.7443946188340808,\n \"acc_stderr\": 0.029275891003969927,\n \"acc_norm\": 0.7443946188340808,\n \"acc_norm_stderr\": 0.029275891003969927\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.8244274809160306,\n \"acc_stderr\": 0.03336820338476074,\n \"acc_norm\": 0.8244274809160306,\n \"acc_norm_stderr\": 0.03336820338476074\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.859504132231405,\n \"acc_stderr\": 0.031722334260021585,\n \"acc_norm\": 0.859504132231405,\n \"acc_norm_stderr\": 0.031722334260021585\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8425925925925926,\n \"acc_stderr\": 0.03520703990517963,\n \"acc_norm\": 0.8425925925925926,\n \"acc_norm_stderr\": 0.03520703990517963\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7914110429447853,\n \"acc_stderr\": 0.031921934489347235,\n \"acc_norm\": 0.7914110429447853,\n \"acc_norm_stderr\": 0.031921934489347235\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4642857142857143,\n \"acc_stderr\": 0.04733667890053756,\n \"acc_norm\": 0.4642857142857143,\n \"acc_norm_stderr\": 0.04733667890053756\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8446601941747572,\n \"acc_stderr\": 0.03586594738573974,\n \"acc_norm\": 0.8446601941747572,\n \"acc_norm_stderr\": 0.03586594738573974\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.905982905982906,\n \"acc_stderr\": 0.019119892798924978,\n \"acc_norm\": 0.905982905982906,\n \"acc_norm_stderr\": 0.019119892798924978\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.82,\n \"acc_stderr\": 0.03861229196653694,\n \"acc_norm\": 0.82,\n \"acc_norm_stderr\": 0.03861229196653694\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8914431673052363,\n \"acc_stderr\": 0.011124283175851199,\n \"acc_norm\": 0.8914431673052363,\n 
\"acc_norm_stderr\": 0.011124283175851199\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7630057803468208,\n \"acc_stderr\": 0.02289408248992599,\n \"acc_norm\": 0.7630057803468208,\n \"acc_norm_stderr\": 0.02289408248992599\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.464804469273743,\n \"acc_stderr\": 0.016681020931076655,\n \"acc_norm\": 0.464804469273743,\n \"acc_norm_stderr\": 0.016681020931076655\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7875816993464052,\n \"acc_stderr\": 0.023420375478296136,\n \"acc_norm\": 0.7875816993464052,\n \"acc_norm_stderr\": 0.023420375478296136\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7813504823151125,\n \"acc_stderr\": 0.02347558141786111,\n \"acc_norm\": 0.7813504823151125,\n \"acc_norm_stderr\": 0.02347558141786111\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.8518518518518519,\n \"acc_stderr\": 0.019766459563597252,\n \"acc_norm\": 0.8518518518518519,\n \"acc_norm_stderr\": 0.019766459563597252\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.5602836879432624,\n \"acc_stderr\": 0.029609912075594113,\n \"acc_norm\": 0.5602836879432624,\n \"acc_norm_stderr\": 0.029609912075594113\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.5345501955671447,\n \"acc_stderr\": 0.012739711554045713,\n \"acc_norm\": 0.5345501955671447,\n \"acc_norm_stderr\": 0.012739711554045713\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.7573529411764706,\n \"acc_stderr\": 0.026040662474201257,\n \"acc_norm\": 0.7573529411764706,\n \"acc_norm_stderr\": 0.026040662474201257\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.7679738562091504,\n \"acc_stderr\": 0.017077373377856926,\n \"acc_norm\": 0.7679738562091504,\n \"acc_norm_stderr\": 0.017077373377856926\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.7090909090909091,\n \"acc_stderr\": 0.04350271442923243,\n \"acc_norm\": 0.7090909090909091,\n \"acc_norm_stderr\": 0.04350271442923243\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.8040816326530612,\n \"acc_stderr\": 0.025409301953225678,\n \"acc_norm\": 0.8040816326530612,\n \"acc_norm_stderr\": 0.025409301953225678\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8606965174129353,\n \"acc_stderr\": 0.024484487162913973,\n \"acc_norm\": 0.8606965174129353,\n \"acc_norm_stderr\": 0.024484487162913973\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.9,\n \"acc_stderr\": 0.030151134457776334,\n \"acc_norm\": 0.9,\n \"acc_norm_stderr\": 0.030151134457776334\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.03892494720807614,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.03892494720807614\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8771929824561403,\n \"acc_stderr\": 0.025172984350155747,\n \"acc_norm\": 0.8771929824561403,\n \"acc_norm_stderr\": 0.025172984350155747\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.4173806609547124,\n \"mc1_stderr\": 0.01726289106327218,\n \"mc2\": 0.5746085023184442,\n \"mc2_stderr\": 0.015092112041318098\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8310970797158642,\n \"acc_stderr\": 0.010529981411838899\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.5564821834723275,\n \"acc_stderr\": 0.013684327592606165\n }\n}\n```", "repo_url": "https://huggingface.co/hfl/chinese-mixtral-instruct", "leaderboard_url": 
"https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_02T07_46_52.047789", "path": ["**/details_harness|arc:challenge|25_2024-02-02T07-46-52.047789.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-02T07-46-52.047789.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_02T07_46_52.047789", "path": ["**/details_harness|gsm8k|5_2024-02-02T07-46-52.047789.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-02T07-46-52.047789.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_02T07_46_52.047789", "path": ["**/details_harness|hellaswag|10_2024-02-02T07-46-52.047789.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-02T07-46-52.047789.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_02T07_46_52.047789", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T07-46-52.047789.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T07-46-52.047789.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T07-46-52.047789.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-02T07-46-52.047789.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T07-46-52.047789.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T07-46-52.047789.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_02T07_46_52.047789", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T07-46-52.047789.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T07-46-52.047789.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_02T07_46_52.047789", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T07-46-52.047789.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T07-46-52.047789.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_02T07_46_52.047789", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T07-46-52.047789.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T07-46-52.047789.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_02T07_46_52.047789", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T07-46-52.047789.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T07-46-52.047789.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_02T07_46_52.047789", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T07-46-52.047789.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T07-46-52.047789.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_02T07_46_52.047789", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T07-46-52.047789.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T07-46-52.047789.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_02T07_46_52.047789", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T07-46-52.047789.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T07-46-52.047789.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_02_02T07_46_52.047789", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T07-46-52.047789.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T07-46-52.047789.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_02T07_46_52.047789", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T07-46-52.047789.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T07-46-52.047789.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_02T07_46_52.047789", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T07-46-52.047789.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T07-46-52.047789.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_02T07_46_52.047789", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T07-46-52.047789.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T07-46-52.047789.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_02T07_46_52.047789", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T07-46-52.047789.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T07-46-52.047789.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_02T07_46_52.047789", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T07-46-52.047789.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T07-46-52.047789.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_02T07_46_52.047789", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T07-46-52.047789.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T07-46-52.047789.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_02T07_46_52.047789", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T07-46-52.047789.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T07-46-52.047789.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_02T07_46_52.047789", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T07-46-52.047789.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T07-46-52.047789.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_02T07_46_52.047789", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T07-46-52.047789.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T07-46-52.047789.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_02T07_46_52.047789", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T07-46-52.047789.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T07-46-52.047789.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_02T07_46_52.047789", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T07-46-52.047789.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T07-46-52.047789.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_02T07_46_52.047789", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T07-46-52.047789.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T07-46-52.047789.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_02T07_46_52.047789", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T07-46-52.047789.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T07-46-52.047789.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_02T07_46_52.047789", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T07-46-52.047789.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T07-46-52.047789.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_02T07_46_52.047789", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T07-46-52.047789.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T07-46-52.047789.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_02T07_46_52.047789", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T07-46-52.047789.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T07-46-52.047789.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_02T07_46_52.047789", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T07-46-52.047789.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T07-46-52.047789.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_02T07_46_52.047789", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T07-46-52.047789.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T07-46-52.047789.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_02T07_46_52.047789", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T07-46-52.047789.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T07-46-52.047789.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_02T07_46_52.047789", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T07-46-52.047789.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T07-46-52.047789.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_02T07_46_52.047789", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T07-46-52.047789.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T07-46-52.047789.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_02T07_46_52.047789", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T07-46-52.047789.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T07-46-52.047789.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_02T07_46_52.047789", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T07-46-52.047789.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T07-46-52.047789.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_02T07_46_52.047789", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T07-46-52.047789.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T07-46-52.047789.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_02T07_46_52.047789", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T07-46-52.047789.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T07-46-52.047789.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_02T07_46_52.047789", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T07-46-52.047789.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T07-46-52.047789.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_02T07_46_52.047789", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T07-46-52.047789.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T07-46-52.047789.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_02T07_46_52.047789", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T07-46-52.047789.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T07-46-52.047789.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_02T07_46_52.047789", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T07-46-52.047789.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T07-46-52.047789.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_02_02T07_46_52.047789", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T07-46-52.047789.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T07-46-52.047789.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_02T07_46_52.047789", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T07-46-52.047789.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T07-46-52.047789.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_02T07_46_52.047789", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T07-46-52.047789.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T07-46-52.047789.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_02T07_46_52.047789", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T07-46-52.047789.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T07-46-52.047789.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_02T07_46_52.047789", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T07-46-52.047789.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T07-46-52.047789.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_02T07_46_52.047789", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T07-46-52.047789.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T07-46-52.047789.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_02T07_46_52.047789", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T07-46-52.047789.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T07-46-52.047789.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_02T07_46_52.047789", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T07-46-52.047789.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T07-46-52.047789.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_02T07_46_52.047789", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T07-46-52.047789.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T07-46-52.047789.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_02T07_46_52.047789", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T07-46-52.047789.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T07-46-52.047789.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_02T07_46_52.047789", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T07-46-52.047789.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T07-46-52.047789.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_02T07_46_52.047789", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T07-46-52.047789.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T07-46-52.047789.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_02T07_46_52.047789", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T07-46-52.047789.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T07-46-52.047789.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_02T07_46_52.047789", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T07-46-52.047789.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T07-46-52.047789.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_02T07_46_52.047789", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T07-46-52.047789.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T07-46-52.047789.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_02T07_46_52.047789", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T07-46-52.047789.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T07-46-52.047789.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_02T07_46_52.047789", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T07-46-52.047789.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T07-46-52.047789.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_02T07_46_52.047789", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T07-46-52.047789.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T07-46-52.047789.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_02T07_46_52.047789", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T07-46-52.047789.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T07-46-52.047789.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_02T07_46_52.047789", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T07-46-52.047789.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T07-46-52.047789.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_02T07_46_52.047789", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T07-46-52.047789.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T07-46-52.047789.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_02T07_46_52.047789", "path": ["**/details_harness|winogrande|5_2024-02-02T07-46-52.047789.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-02-02T07-46-52.047789.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_02_02T07_46_52.047789", "path": ["results_2024-02-02T07-46-52.047789.parquet"]}, {"split": "latest", "path": 
["results_2024-02-02T07-46-52.047789.parquet"]}]}]}
2024-02-02T07:49:32+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of hfl/chinese-mixtral-instruct

Dataset automatically created during the evaluation run of model hfl/chinese-mixtral-instruct on the Open LLM Leaderboard.

The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).

To load the details from a run, you can for instance do the following (see the sketch after this card):

## Latest results

These are the latest results from run 2024-02-02T07:46:52.047789 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

## Dataset Details

### Dataset Description

- Curated by:
- Funded by [optional]:
- Shared by [optional]:
- Language(s) (NLP):
- License:

### Dataset Sources [optional]

- Repository:
- Paper [optional]:
- Demo [optional]:

## Uses

### Direct Use

### Out-of-Scope Use

## Dataset Structure

## Dataset Creation

### Curation Rationale

### Source Data

#### Data Collection and Processing

#### Who are the source data producers?

### Annotations [optional]

#### Annotation process

#### Who are the annotators?

#### Personal and Sensitive Information

## Bias, Risks, and Limitations

### Recommendations

Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.

[optional]

BibTeX:

APA:

## Glossary [optional]

## More Information [optional]

## Dataset Card Authors [optional]

## Dataset Card Contact
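The original loading snippet was stripped from this card's processed text. A minimal reconstruction, assuming the leaderboard's usual `details_<org>__<model>` repo naming convention (as seen on other cards in this dump) and using the `harness_winogrande_5` config listed in this record's metadata:

```python
from datasets import load_dataset

# Repo name inferred from the leaderboard's details_<org>__<model> convention;
# "harness_winogrande_5" is one of the configs listed in this record's metadata.
data = load_dataset(
    "open-llm-leaderboard/details_hfl__chinese-mixtral-instruct",
    "harness_winogrande_5",
    split="train",
)
```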
[ "# Dataset Card for Evaluation run of hfl/chinese-mixtral-instruct\n\n\n\nDataset automatically created during the evaluation run of model hfl/chinese-mixtral-instruct on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T07:46:52.047789(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of hfl/chinese-mixtral-instruct\n\n\n\nDataset automatically created during the evaluation run of model hfl/chinese-mixtral-instruct on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T07:46:52.047789(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
d48fc7467c99e61dafc0fc6e67c99cdee3b2917c
# Synthetic Search Query Parsing Instruction for Saiga family

This is a version of the [EmbeddingStudio/synthetic-search-queries-ru dataset](https://huggingface.co/datasets/EmbeddingStudio/synthetic-search-queries-ru), created to be aligned with the [Saiga-Mistral-7B](https://huggingface.co/IlyaGusev/saiga_mistral_7b_lora) instruction format.

## Generation details

We used synthetically generated query parsing instructions:
* We generated lists of possible filters for 72 company categories:
  * [Raw version of filters dataset](https://huggingface.co/datasets/EmbeddingStudio/synthetic-search-filters-ru-raw)
  * [Split by representations](https://huggingface.co/datasets/EmbeddingStudio/synthetic-search-filters-ru)
* We randomly selected up to 150 possible combinations of filters (1-3 filters per combination), such that each filter's representation appears at most twice.
* For a given category and combination we [generated](https://huggingface.co/datasets/EmbeddingStudio/synthetic-search-queries-ru) with GPT-4 Turbo:
  * 2 search queries and their parsed versions with unstructured parts.
  * 2 search queries and their parsed versions without an unstructured part.
* Using the filters, queries and parsed versions, we prepared [27.42k Saiga-format instructions](https://huggingface.co/datasets/EmbeddingStudio/query-parsing-instructions-saiga).

**Warning:** the EmbeddingStudio team advises you that the generated queries **were not sufficiently curated**; they will be curated later, once we finish our product-market-fit stage.

### Filters generation details

We used GPT-4 Turbo to generate several possible filters for 72 company categories. For each filter we also generated some possible representations. For example, the filter `Date` can be represented as `dd/mm/YYYY`, `YYYY-mm-dd`, as words `2024 Января 17`, etc.

### Queries generation details

We also used GPT-4 Turbo to generate search queries and their parsed versions. The main principles were:
* If the passed schema doesn't contain a possible filter, do not generate the query or that filter.
* If a selected representation combination contains an enumeration, we ask GPT-4 Turbo to map values between the search query and the parsed version.
* If a selected representation combination contains a pattern, we ask GPT-4 Turbo to align with that pattern.

### Instructions generation details

For generating the instructions we used the following ideas:
1. A zero-shot query parser should be schema agnostic. Cases like `snake_case, CamelCase, http-headers-like` should not ruin the generation process.
2. A zero-shot query parser should be insensitive to spelling errors.
3. Training instructions should be in the following order:
   * Category
   * Schema
   * Query

   This way the LLM can be used as follows: just generate the embedding of the category -> schema part once, so inference will be faster.

We assume that the term `schema agnostic` means something wider: being able to work not only with JSON, but also with HTML, Markdown, YAML, etc. We are working on it.

Our approach to achieving these abilities was:
1. For each query we generated a version with a mistake.
2. We added to each parsed version an additional field `Correct`, which contains the corrected version of the search query.
3. For each query we randomly selected and used a case for schema fields and a case for filter and representation names.
4. For each query we additionally generated two instructions:
   * one where we removed one filter from the provided schema and parsed version;
   * one where we removed all related filters from the provided schema and parsed version.

**Warning:** the EmbeddingStudio team asks you to curate the datasets precisely on your own.

## Instruction format

```markdown
### System: Master in Query Analysis
### Instruction: Organize queries in JSON, adhere to schema, verify spelling.
#### Category: {your_company_category}
#### Schema: ```{filters_schema}```
#### Query: {query}
### Response:
```

The filters schema is a JSON-readable line in the following format (we highly recommend you use it):

A list of filters (dict):
* Name - name of the filter (better to be meaningful).
* Representations - list of possible filter formats (dict):
  * Name - name of the representation (better to be meaningful).
  * Type - Python base type (int, float, str, bool).
  * Examples - list of examples.
  * Enum - if a representation is an enumeration, provide a list of possible values; the LLM should map the parsed value into this list.
  * Pattern - if a representation is pattern-like (datetime, regexp, etc.), provide the pattern text in any format.

Example:
```json
[{"Name": "Customer_Ratings", "Representations": [{"Name": "Exact_Rating", "Type": "float", "Examples": [4.5, 3.2, 5.0, "4.5", "Unstructured"]}, {"Name": "Minimum_Rating", "Type": "float", "Examples": [4.0, 3.0, 5.0, "4.5"]}, {"Name": "Star_Rating", "Type": "int", "Examples": [4, 3, 5], "Enum": [1, 2, 3, 4, 5]}]}, {"Name": "Date", "Representations": [{"Name": "Day_Month_Year", "Type": "str", "Examples": ["01.01.2024", "15.06.2023", "31.12.2022", "25.12.2021", "20.07.2024", "15.06.2023"], "Pattern": "dd.mm.YYYY"}, {"Name": "Day_Name", "Type": "str", "Examples": ["Понедельник", "Вторник", "пн", "вт", "Среда", "Четверг"], "Enum": ["Понедельник", "Вторник", "Среда", "Четверг", "Пятница", "Суббота", "Воскресенье"]}]}, {"Name": "Date_Period", "Representations": [{"Name": "Specific_Period", "Type": "str", "Examples": ["01.01.2024 - 31.01.2024", "01.06.2023 - 30.06.2023", "01.12.2022 - 31.12.2022"], "Pattern": "dd.mm.YYYY - dd.mm.YYYY"}, {"Name": "Month", "Type": "str", "Examples": ["Январь", "Янв", "Декабрь"], "Enum": ["Январь", "Февраль", "Март", "Апрель", "Май", "Июнь", "Июль", "Август", "Сентябрь", "Октябрь", "Ноябрь", "Декабрь"]}, {"Name": "Quarter", "Type": "str", "Examples": ["Q1", "Q2", "Q3"], "Enum": ["Q1", "Q2", "Q3", "Q4"]}, {"Name": "Season", "Type": "str", "Examples": ["Winter", "Summer", "Autumn"], "Enum": ["Winter", "Spring", "Summer", "Autumn"]}]}, {"Name": "Destination_Country", "Representations": [{"Name": "Country_Name", "Type": "str", "Examples": ["United States", "Germany", "China"]}, {"Name": "Country_Code", "Type": "str", "Examples": ["US", "DE", "CN"]}, {"Name": "Country_Abbreviation", "Type": "str", "Examples": ["USA", "GER", "CHN"]}]}]
```

As a result, the response will be a JSON-readable line in the format:
```json
[{"Value": "Corrected search phrase", "Name": "Correct"}, {"Name": "filter-name.representation", "Value": "some-value"}]
```

Field and representation names will be aligned with the provided schema.
Example: ```json [{"Value": "приложение для новогодней акции, дедлайн 31 декабря", "Name": "Correct"}, {"Name": "Project-End-Date.Day-Month-Year", "Value": "31 декабря текущего года"}] ``` Used for fine-tuning `system` phrases: ```python [ "Эксперт по разбору поисковых запросов", "Мастер анализа поисковых запросов", "Первоклассный интерпретатор поисковых запросов", "Продвинутый декодер поисковых запросов", "Гений разбора поисковых запросов", "Волшебник разбора поисковых запросов", "Непревзойденный механизм разбора запросов", "Виртуоз разбора поисковых запросов", "Маэстро разбора запросов", ] ``` Used for fine-tuning `instruction` phrases: ```python [ "Преобразование запросов в JSON, соответствие схеме, обеспечение правильного написания.", "Анализ и структурирование запросов в JSON, поддержание схемы, проверка орфографии.", "Организация запросов в JSON, соблюдение схемы, верификация орфографии.", "Декодирование запросов в JSON, следование схеме, исправление орфографии.", "Разбор запросов в JSON, соответствие схеме, правильное написание.", "Преобразование запросов в структурированный JSON, соответствие схеме и орфографии.", "Реструктуризация запросов в JSON, соответствие схеме, точное написание.", "Перестановка запросов в JSON, строгое соблюдение схемы, поддержание орфографии.", "Гармонизация запросов с JSON схемой, обеспечение точности написания.", "Эффективное преобразование запросов в JSON, соответствие схеме, правильная орфография." ] ``` ## Train/test splitting principles As we are trying to fine-tune LLM to follow zero-shot query parsing instructions, so we want to test: * Ability to work well with unseen domain * Ability to work well with unseen filters * Ability to work well with unseen queries For these purposes we: 1. We put into test split 5 categories, completely separared from train: `Automotive, Educational Institutions, Enterprise Software Development, Payment Processing, Professional Social Networks`. 2. Also out of each appearing in train company categories, we put aside / removed one filter and queries related to it. 3. Selected 5% of other queries and put it into test. ## How to use it ```python from datasets import load_dataset queries_dataset = load_dataset('EmbeddingStudio/query-parsing-instructions-saiga') ```
EmbeddingStudio/query-parsing-instructions-saiga
[ "task_categories:token-classification", "task_categories:text-generation", "size_categories:10K<n<100K", "language:ru", "license:apache-2.0", "saiga", "mistral", "instuct", "zero-shot", "query parsing", "synthetic", "search-queries", "e-commerce", "online-shops", "travel-agencies", "educational-institutions-ai", "job-recruitment-automation", "banking-digital-services", "investment-ai-analysis", "insurance-tech-innovation", "financial-advisory-ai", "credit-services-automation", "payment-processing-tech", "mortgage-tech-solutions", "real-estate-digital-solutions", "taxation-tech-services", "risk-management-ai", "compliance-automation", "digital-banking-innovation", "mobile-banking-tech", "online-retail-tech", "offline-retail-automation", "automotive-dealership-tech", "restaurant-automation-tech", "food-delivery-ai", "entertainment-platforms-ai", "media-platforms-tech", "government-services-automation", "travel-tech-innovation", "consumer-analytics-ai", "logistics-tech-automation", "supply-chain-ai", "customer-support-tech", "market-research-ai", "mobile-app-dev-tech", "game-dev-ai", "cloud-computing-services", "data-analytics-ai", "business-intelligence-ai", "cybersecurity-software-tech", "ui-ux-design-ai", "iot-development-tech", "project-management-tools-ai", "version-control-systems-tech", "ci-cd-automation", "issue-tracking-ai", "bug-reporting-automation", "collaborative-dev-environments", "team-communication-tech", "task-time-management-ai", "customer-feedback-ai", "cloud-based-dev-tech", "image-stock-platforms-ai", "video-hosting-tech", "social-networks-ai", "professional-social-networks-ai", "dating-apps-tech", "region:us" ]
2024-02-02T07:51:28+00:00
{"language": ["ru"], "license": "apache-2.0", "size_categories": ["10K<n<100K"], "task_categories": ["token-classification", "text-generation"], "pretty_name": "Synthetic Search Query Parsing Instruction for Saiga family", "dataset_info": {"features": [{"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 41107403, "num_examples": 20479}, {"name": "test", "num_bytes": 13985735, "num_examples": 6915}], "download_size": 16155342, "dataset_size": 55093138}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "tags": ["saiga", "mistral", "instuct", "zero-shot", "query parsing", "synthetic", "search-queries", "e-commerce", "online-shops", "travel-agencies", "educational-institutions-ai", "job-recruitment-automation", "banking-digital-services", "investment-ai-analysis", "insurance-tech-innovation", "financial-advisory-ai", "credit-services-automation", "payment-processing-tech", "mortgage-tech-solutions", "real-estate-digital-solutions", "taxation-tech-services", "risk-management-ai", "compliance-automation", "digital-banking-innovation", "mobile-banking-tech", "online-retail-tech", "offline-retail-automation", "automotive-dealership-tech", "restaurant-automation-tech", "food-delivery-ai", "entertainment-platforms-ai", "media-platforms-tech", "government-services-automation", "travel-tech-innovation", "consumer-analytics-ai", "logistics-tech-automation", "supply-chain-ai", "customer-support-tech", "market-research-ai", "mobile-app-dev-tech", "game-dev-ai", "cloud-computing-services", "data-analytics-ai", "business-intelligence-ai", "cybersecurity-software-tech", "ui-ux-design-ai", "iot-development-tech", "project-management-tools-ai", "version-control-systems-tech", "ci-cd-automation", "issue-tracking-ai", "bug-reporting-automation", "collaborative-dev-environments", "team-communication-tech", "task-time-management-ai", "customer-feedback-ai", "cloud-based-dev-tech", "image-stock-platforms-ai", "video-hosting-tech", "social-networks-ai", "professional-social-networks-ai", "dating-apps-tech"]}
2024-02-02T11:56:51+00:00
[]
[ "ru" ]
TAGS #task_categories-token-classification #task_categories-text-generation #size_categories-10K<n<100K #language-Russian #license-apache-2.0 #saiga #mistral #instuct #zero-shot #query parsing #synthetic #search-queries #e-commerce #online-shops #travel-agencies #educational-institutions-ai #job-recruitment-automation #banking-digital-services #investment-ai-analysis #insurance-tech-innovation #financial-advisory-ai #credit-services-automation #payment-processing-tech #mortgage-tech-solutions #real-estate-digital-solutions #taxation-tech-services #risk-management-ai #compliance-automation #digital-banking-innovation #mobile-banking-tech #online-retail-tech #offline-retail-automation #automotive-dealership-tech #restaurant-automation-tech #food-delivery-ai #entertainment-platforms-ai #media-platforms-tech #government-services-automation #travel-tech-innovation #consumer-analytics-ai #logistics-tech-automation #supply-chain-ai #customer-support-tech #market-research-ai #mobile-app-dev-tech #game-dev-ai #cloud-computing-services #data-analytics-ai #business-intelligence-ai #cybersecurity-software-tech #ui-ux-design-ai #iot-development-tech #project-management-tools-ai #version-control-systems-tech #ci-cd-automation #issue-tracking-ai #bug-reporting-automation #collaborative-dev-environments #team-communication-tech #task-time-management-ai #customer-feedback-ai #cloud-based-dev-tech #image-stock-platforms-ai #video-hosting-tech #social-networks-ai #professional-social-networks-ai #dating-apps-tech #region-us
# Synthetic Search Query Parsing Instruction for Saiga family This is the version of EmbeddingStudio/synthetic-search-queries-ru dataset created the way to be aligned with Saiga-Mistral-7B instruction format. ## Generation details We used synthetically generated query parsing instructions: * We generated lists of possible filters for 72 company categories: * Raw version of filters dataset * Split by representations * Select randomly up-to 150 possible combinations (1-3 filters in each combination) of filters, the way each filter's representation appears maximum twice. * For a given category and combination we generated with GPT-4 Turbo: * 2 search queries and theirs parsed version with unstructured parts. * 2 search queries and theirs parsed version without unstructured part. * Using filters, queries and parsed version we prepared 27.42k saiga format instruction Warning: EmbeddingStudio team aware you that generated queries weren't enough curated, and will be curated later once we finish our product market fit stage ### Filters generation details We used GPT-4 Turbo to generate several possible filters for 72 company categroies. For each filter we also generated some possible representations. For examples filter 'Date' can be represented as 'dd/mm/YYYY', 'YYYY-mm-dd', as words '2024 Января 17', etc. ### Queries generation details We also used GPT-4 Turbo for generation of search queries and theirs parsed version. Main principles were: * If passed schema doesn't contain possible filter, do not generate query itself or a possible filter * If a selected representations combination contains enumeration, so we ask to map values in a search query and a parsed version. * If a selected representations combination contains pattern, so we ask GPT-4 Turbo to be aligned with a pattern ### Instructions generation details For the generation instructions we used following ideas: 1. Zero-Shot query parser should be schema agnostic. Cases like 'snake_case, CamelCase, http-headers-like' should not ruin generation process. 2. Zero-Shot query parser should be spelling errors insensitive. 3. Training instructions should be in the following order: * Category * Schema * Query So LLM can be used in the following way: just generate embedding of category -> schema part, so inference will be faster. We assume, that 'schema agnostic' termin means something wider, like to be able to work not only with JSONs, but also with HTML, Markdown, YAML, etc. We are working on it. So, what was our approach as an attempt to achieve these abilities: 1. For each query we generated a version with a mistake 2. Passed to each parsed version an additional field 'Correct', which contains a corrected version of a search query. 3. For each query we randomly selected and used a case for schema fields and a case for filter and representation names. 4. For each query we additionally generated two instuctions: * Where did we remove from a provided schema and parsed version one filter * Where did we remove from a provided schema and parsed version all related filters Warning: EmbeddingStudio team ask you to curate datasets on your own precisely. ## Instruction format {filters_schema} Filters schema is JSON-readable line in the format (we highly recommend you to use it): List of filters (dict): * Name - name of filter (better to be meaningful). * Representations - list of possible filter formats (dict): * Name - name of representation (better to be meaningful). * Type - python base type (int, float, str, bool). * Examples - list of examples. 
* Enum - if a representation is enumeration, provide a list of possible values, LLM should map parsed value into this list. * Pattern - if a representation is pattern-like (datetime, regexp, etc.) provide a pattern text in any format. Example: As the result, response will be JSON-readable line in the format: Field and representation names will be aligned with the provided schema. Example: Used for fine-tuning 'system' phrases: Used for fine-tuning 'instruction' phrases: ## Train/test splitting principles As we are trying to fine-tune LLM to follow zero-shot query parsing instructions, so we want to test: * Ability to work well with unseen domain * Ability to work well with unseen filters * Ability to work well with unseen queries For these purposes we: 1. We put into test split 5 categories, completely separared from train: 'Automotive, Educational Institutions, Enterprise Software Development, Payment Processing, Professional Social Networks'. 2. Also out of each appearing in train company categories, we put aside / removed one filter and queries related to it. 3. Selected 5% of other queries and put it into test. ## How to use it
[ "# Synthetic Search Query Parsing Instruction for Saiga family\n\nThis is the version of EmbeddingStudio/synthetic-search-queries-ru dataset created the way to be aligned with Saiga-Mistral-7B instruction format.", "## Generation details\n\nWe used synthetically generated query parsing instructions:\n* We generated lists of possible filters for 72 company categories: \n * Raw version of filters dataset\n * Split by representations\n* Select randomly up-to 150 possible combinations (1-3 filters in each combination) of filters, the way each filter's representation appears maximum twice.\n* For a given category and combination we generated with GPT-4 Turbo:\n * 2 search queries and theirs parsed version with unstructured parts.\n * 2 search queries and theirs parsed version without unstructured part. \n* Using filters, queries and parsed version we prepared 27.42k saiga format instruction\n\nWarning: EmbeddingStudio team aware you that generated queries weren't enough curated, and will be curated later once we finish our product market fit stage", "### Filters generation details\n\nWe used GPT-4 Turbo to generate several possible filters for 72 company categroies. For each filter we also generated some possible representations. For examples filter 'Date' can be represented as 'dd/mm/YYYY', 'YYYY-mm-dd', as words '2024 Января 17', etc.", "### Queries generation details\n\nWe also used GPT-4 Turbo for generation of search queries and theirs parsed version. Main principles were: \n* If passed schema doesn't contain possible filter, do not generate query itself or a possible filter \n* If a selected representations combination contains enumeration, so we ask to map values in a search query and a parsed version.\n* If a selected representations combination contains pattern, so we ask GPT-4 Turbo to be aligned with a pattern", "### Instructions generation details\n\nFor the generation instructions we used following ideas:\n1. Zero-Shot query parser should be schema agnostic. Cases like 'snake_case, CamelCase, http-headers-like' should not ruin generation process. \n2. Zero-Shot query parser should be spelling errors insensitive.\n3. Training instructions should be in the following order:\n * Category\n * Schema\n * Query\n \n So LLM can be used in the following way: just generate embedding of category -> schema part, so inference will be faster.\n\nWe assume, that 'schema agnostic' termin means something wider, like to be able to work not only with JSONs, but also with HTML, Markdown, YAML, etc. We are working on it.\n\nSo, what was our approach as an attempt to achieve these abilities:\n1. For each query we generated a version with a mistake\n2. Passed to each parsed version an additional field 'Correct', which contains a corrected version of a search query.\n3. For each query we randomly selected and used a case for schema fields and a case for filter and representation names.\n4. 
For each query we additionally generated two instuctions:\n * Where did we remove from a provided schema and parsed version one filter\n * Where did we remove from a provided schema and parsed version all related filters\n\nWarning: EmbeddingStudio team ask you to curate datasets on your own precisely.", "## Instruction format\n\n{filters_schema}\n\nFilters schema is JSON-readable line in the format (we highly recommend you to use it):\nList of filters (dict):\n* Name - name of filter (better to be meaningful).\n* Representations - list of possible filter formats (dict):\n * Name - name of representation (better to be meaningful).\n * Type - python base type (int, float, str, bool).\n * Examples - list of examples.\n * Enum - if a representation is enumeration, provide a list of possible values, LLM should map parsed value into this list.\n * Pattern - if a representation is pattern-like (datetime, regexp, etc.) provide a pattern text in any format.\n\nExample:\n\n\nAs the result, response will be JSON-readable line in the format:\n\n\nField and representation names will be aligned with the provided schema. Example:\n\nUsed for fine-tuning 'system' phrases:\n\n\nUsed for fine-tuning 'instruction' phrases:", "## Train/test splitting principles\n\nAs we are trying to fine-tune LLM to follow zero-shot query parsing instructions, so we want to test:\n* Ability to work well with unseen domain\n* Ability to work well with unseen filters\n* Ability to work well with unseen queries\n\nFor these purposes we:\n1. We put into test split 5 categories, completely separared from train: 'Automotive, Educational Institutions, Enterprise Software Development, Payment Processing, Professional Social Networks'.\n2. Also out of each appearing in train company categories, we put aside / removed one filter and queries related to it.\n3. Selected 5% of other queries and put it into test.", "## How to use it" ]
[ "TAGS\n#task_categories-token-classification #task_categories-text-generation #size_categories-10K<n<100K #language-Russian #license-apache-2.0 #saiga #mistral #instuct #zero-shot #query parsing #synthetic #search-queries #e-commerce #online-shops #travel-agencies #educational-institutions-ai #job-recruitment-automation #banking-digital-services #investment-ai-analysis #insurance-tech-innovation #financial-advisory-ai #credit-services-automation #payment-processing-tech #mortgage-tech-solutions #real-estate-digital-solutions #taxation-tech-services #risk-management-ai #compliance-automation #digital-banking-innovation #mobile-banking-tech #online-retail-tech #offline-retail-automation #automotive-dealership-tech #restaurant-automation-tech #food-delivery-ai #entertainment-platforms-ai #media-platforms-tech #government-services-automation #travel-tech-innovation #consumer-analytics-ai #logistics-tech-automation #supply-chain-ai #customer-support-tech #market-research-ai #mobile-app-dev-tech #game-dev-ai #cloud-computing-services #data-analytics-ai #business-intelligence-ai #cybersecurity-software-tech #ui-ux-design-ai #iot-development-tech #project-management-tools-ai #version-control-systems-tech #ci-cd-automation #issue-tracking-ai #bug-reporting-automation #collaborative-dev-environments #team-communication-tech #task-time-management-ai #customer-feedback-ai #cloud-based-dev-tech #image-stock-platforms-ai #video-hosting-tech #social-networks-ai #professional-social-networks-ai #dating-apps-tech #region-us \n", "# Synthetic Search Query Parsing Instruction for Saiga family\n\nThis is the version of EmbeddingStudio/synthetic-search-queries-ru dataset created the way to be aligned with Saiga-Mistral-7B instruction format.", "## Generation details\n\nWe used synthetically generated query parsing instructions:\n* We generated lists of possible filters for 72 company categories: \n * Raw version of filters dataset\n * Split by representations\n* Select randomly up-to 150 possible combinations (1-3 filters in each combination) of filters, the way each filter's representation appears maximum twice.\n* For a given category and combination we generated with GPT-4 Turbo:\n * 2 search queries and theirs parsed version with unstructured parts.\n * 2 search queries and theirs parsed version without unstructured part. \n* Using filters, queries and parsed version we prepared 27.42k saiga format instruction\n\nWarning: EmbeddingStudio team aware you that generated queries weren't enough curated, and will be curated later once we finish our product market fit stage", "### Filters generation details\n\nWe used GPT-4 Turbo to generate several possible filters for 72 company categroies. For each filter we also generated some possible representations. For examples filter 'Date' can be represented as 'dd/mm/YYYY', 'YYYY-mm-dd', as words '2024 Января 17', etc.", "### Queries generation details\n\nWe also used GPT-4 Turbo for generation of search queries and theirs parsed version. Main principles were: \n* If passed schema doesn't contain possible filter, do not generate query itself or a possible filter \n* If a selected representations combination contains enumeration, so we ask to map values in a search query and a parsed version.\n* If a selected representations combination contains pattern, so we ask GPT-4 Turbo to be aligned with a pattern", "### Instructions generation details\n\nFor the generation instructions we used following ideas:\n1. Zero-Shot query parser should be schema agnostic. 
Cases like 'snake_case, CamelCase, http-headers-like' should not ruin generation process. \n2. Zero-Shot query parser should be spelling errors insensitive.\n3. Training instructions should be in the following order:\n * Category\n * Schema\n * Query\n \n So LLM can be used in the following way: just generate embedding of category -> schema part, so inference will be faster.\n\nWe assume, that 'schema agnostic' termin means something wider, like to be able to work not only with JSONs, but also with HTML, Markdown, YAML, etc. We are working on it.\n\nSo, what was our approach as an attempt to achieve these abilities:\n1. For each query we generated a version with a mistake\n2. Passed to each parsed version an additional field 'Correct', which contains a corrected version of a search query.\n3. For each query we randomly selected and used a case for schema fields and a case for filter and representation names.\n4. For each query we additionally generated two instuctions:\n * Where did we remove from a provided schema and parsed version one filter\n * Where did we remove from a provided schema and parsed version all related filters\n\nWarning: EmbeddingStudio team ask you to curate datasets on your own precisely.", "## Instruction format\n\n{filters_schema}\n\nFilters schema is JSON-readable line in the format (we highly recommend you to use it):\nList of filters (dict):\n* Name - name of filter (better to be meaningful).\n* Representations - list of possible filter formats (dict):\n * Name - name of representation (better to be meaningful).\n * Type - python base type (int, float, str, bool).\n * Examples - list of examples.\n * Enum - if a representation is enumeration, provide a list of possible values, LLM should map parsed value into this list.\n * Pattern - if a representation is pattern-like (datetime, regexp, etc.) provide a pattern text in any format.\n\nExample:\n\n\nAs the result, response will be JSON-readable line in the format:\n\n\nField and representation names will be aligned with the provided schema. Example:\n\nUsed for fine-tuning 'system' phrases:\n\n\nUsed for fine-tuning 'instruction' phrases:", "## Train/test splitting principles\n\nAs we are trying to fine-tune LLM to follow zero-shot query parsing instructions, so we want to test:\n* Ability to work well with unseen domain\n* Ability to work well with unseen filters\n* Ability to work well with unseen queries\n\nFor these purposes we:\n1. We put into test split 5 categories, completely separared from train: 'Automotive, Educational Institutions, Enterprise Software Development, Payment Processing, Professional Social Networks'.\n2. Also out of each appearing in train company categories, we put aside / removed one filter and queries related to it.\n3. Selected 5% of other queries and put it into test.", "## How to use it" ]
4d784e9b619f16c16e7d07d229450e1424d08e47
# Dataset Card for Evaluation run of JaeyeonKang/CCK_Gony_v0.2

<!-- Provide a quick summary of the dataset. -->

Dataset automatically created during the evaluation run of model [JaeyeonKang/CCK_Gony_v0.2](https://huggingface.co/JaeyeonKang/CCK_Gony_v0.2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_JaeyeonKang__CCK_Gony_v0.2",
    "harness_winogrande_5",
    split="train")
```

## Latest results

These are the [latest results from run 2024-02-02T07:55:59.515019](https://huggingface.co/datasets/open-llm-leaderboard/details_JaeyeonKang__CCK_Gony_v0.2/blob/main/results_2024-02-02T07-55-59.515019.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each in the "results" and "latest" splits for each eval):

```python
{ "all": { "acc": 0.7010040167512416, "acc_stderr": 0.03046871961713868, "acc_norm": 0.705796779111703, "acc_norm_stderr": 0.03105821273167017, "mc1": 0.4479804161566707, "mc1_stderr": 0.017408513063422906, "mc2": 0.5946296550939176, "mc2_stderr": 0.01480851612780029 }, "harness|arc:challenge|25": { "acc": 0.6493174061433447, "acc_stderr": 0.013944635930726099, "acc_norm": 0.6885665529010239, "acc_norm_stderr": 0.013532472099850939 }, "harness|hellaswag|10": { "acc": 0.6690898227444733, "acc_stderr": 0.00469579134050288, "acc_norm": 0.8660625373431587, "acc_norm_stderr": 0.0033988905252296995 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.33, "acc_stderr": 0.04725815626252606, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252606 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6888888888888889, "acc_stderr": 0.03999262876617721, "acc_norm": 0.6888888888888889, "acc_norm_stderr": 0.03999262876617721 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.756578947368421, "acc_stderr": 0.034923496688842384, "acc_norm": 0.756578947368421, "acc_norm_stderr": 0.034923496688842384 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.71, "acc_stderr": 0.045604802157206845, "acc_norm": 0.71, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7622641509433963, "acc_stderr": 0.02619980880756193, "acc_norm": 0.7622641509433963, "acc_norm_stderr": 0.02619980880756193 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.8055555555555556, "acc_stderr": 0.03309615177059006, "acc_norm": 0.8055555555555556, "acc_norm_stderr": 0.03309615177059006 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.51, "acc_stderr": 0.05024183937956912, "acc_norm": 0.51, "acc_norm_stderr": 0.05024183937956912 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.67, "acc_stderr": 0.047258156262526066, "acc_norm": 0.67, "acc_norm_stderr": 0.047258156262526066 },
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.44, "acc_stderr": 0.04988876515698589, "acc_norm": 0.44, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.7225433526011561, "acc_stderr": 0.03414014007044036, "acc_norm": 0.7225433526011561, "acc_norm_stderr": 0.03414014007044036 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.45098039215686275, "acc_stderr": 0.049512182523962625, "acc_norm": 0.45098039215686275, "acc_norm_stderr": 0.049512182523962625 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.8, "acc_stderr": 0.04020151261036846, "acc_norm": 0.8, "acc_norm_stderr": 0.04020151261036846 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.6127659574468085, "acc_stderr": 0.03184389265339526, "acc_norm": 0.6127659574468085, "acc_norm_stderr": 0.03184389265339526 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5614035087719298, "acc_stderr": 0.04668000738510455, "acc_norm": 0.5614035087719298, "acc_norm_stderr": 0.04668000738510455 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.6413793103448275, "acc_stderr": 0.039966295748767186, "acc_norm": 0.6413793103448275, "acc_norm_stderr": 0.039966295748767186 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.49206349206349204, "acc_stderr": 0.025748065871673286, "acc_norm": 0.49206349206349204, "acc_norm_stderr": 0.025748065871673286 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.48412698412698413, "acc_stderr": 0.04469881854072606, "acc_norm": 0.48412698412698413, "acc_norm_stderr": 0.04469881854072606 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.44, "acc_stderr": 0.04988876515698589, "acc_norm": 0.44, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.8516129032258064, "acc_stderr": 0.020222737554330378, "acc_norm": 0.8516129032258064, "acc_norm_stderr": 0.020222737554330378 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5665024630541872, "acc_stderr": 0.034867317274198714, "acc_norm": 0.5665024630541872, "acc_norm_stderr": 0.034867317274198714 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.76, "acc_stderr": 0.04292346959909281, "acc_norm": 0.76, "acc_norm_stderr": 0.04292346959909281 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7818181818181819, "acc_stderr": 0.03225078108306289, "acc_norm": 0.7818181818181819, "acc_norm_stderr": 0.03225078108306289 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8787878787878788, "acc_stderr": 0.023253157951942088, "acc_norm": 0.8787878787878788, "acc_norm_stderr": 0.023253157951942088 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9585492227979274, "acc_stderr": 0.01438543285747646, "acc_norm": 0.9585492227979274, "acc_norm_stderr": 0.01438543285747646 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.7153846153846154, "acc_stderr": 0.0228783227997063, "acc_norm": 0.7153846153846154, "acc_norm_stderr": 0.0228783227997063 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.4, "acc_stderr": 0.0298696050953169, "acc_norm": 0.4, "acc_norm_stderr": 0.0298696050953169 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.7773109243697479, "acc_stderr": 0.027025433498882392, "acc_norm": 0.7773109243697479, "acc_norm_stderr": 0.027025433498882392 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.4105960264900662, "acc_stderr": 0.040166895948499266, "acc_norm": 
0.4105960264900662, "acc_norm_stderr": 0.040166895948499266 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8788990825688073, "acc_stderr": 0.013987618292389713, "acc_norm": 0.8788990825688073, "acc_norm_stderr": 0.013987618292389713 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5879629629629629, "acc_stderr": 0.03356787758160831, "acc_norm": 0.5879629629629629, "acc_norm_stderr": 0.03356787758160831 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8725490196078431, "acc_stderr": 0.02340553048084631, "acc_norm": 0.8725490196078431, "acc_norm_stderr": 0.02340553048084631 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8860759493670886, "acc_stderr": 0.020681745135884562, "acc_norm": 0.8860759493670886, "acc_norm_stderr": 0.020681745135884562 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.7130044843049327, "acc_stderr": 0.03036037971029195, "acc_norm": 0.7130044843049327, "acc_norm_stderr": 0.03036037971029195 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.8015267175572519, "acc_stderr": 0.0349814938546247, "acc_norm": 0.8015267175572519, "acc_norm_stderr": 0.0349814938546247 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8760330578512396, "acc_stderr": 0.030083098716035202, "acc_norm": 0.8760330578512396, "acc_norm_stderr": 0.030083098716035202 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7962962962962963, "acc_stderr": 0.038935425188248475, "acc_norm": 0.7962962962962963, "acc_norm_stderr": 0.038935425188248475 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.8282208588957055, "acc_stderr": 0.02963471727237103, "acc_norm": 0.8282208588957055, "acc_norm_stderr": 0.02963471727237103 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.5714285714285714, "acc_stderr": 0.04697113923010213, "acc_norm": 0.5714285714285714, "acc_norm_stderr": 0.04697113923010213 }, "harness|hendrycksTest-management|5": { "acc": 0.8543689320388349, "acc_stderr": 0.034926064766237906, "acc_norm": 0.8543689320388349, "acc_norm_stderr": 0.034926064766237906 }, "harness|hendrycksTest-marketing|5": { "acc": 0.9230769230769231, "acc_stderr": 0.017456987872436186, "acc_norm": 0.9230769230769231, "acc_norm_stderr": 0.017456987872436186 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.78, "acc_stderr": 0.04163331998932261, "acc_norm": 0.78, "acc_norm_stderr": 0.04163331998932261 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8735632183908046, "acc_stderr": 0.01188448890589553, "acc_norm": 0.8735632183908046, "acc_norm_stderr": 0.01188448890589553 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7716763005780347, "acc_stderr": 0.022598703804321628, "acc_norm": 0.7716763005780347, "acc_norm_stderr": 0.022598703804321628 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4893854748603352, "acc_stderr": 0.0167187329411921, "acc_norm": 0.4893854748603352, "acc_norm_stderr": 0.0167187329411921 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.826797385620915, "acc_stderr": 0.021668400256514272, "acc_norm": 0.826797385620915, "acc_norm_stderr": 0.021668400256514272 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7781350482315113, "acc_stderr": 0.023598858292863047, "acc_norm": 0.7781350482315113, "acc_norm_stderr": 0.023598858292863047 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7993827160493827, "acc_stderr": 0.02228231394977487, "acc_norm": 0.7993827160493827, "acc_norm_stderr": 0.02228231394977487 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 
0.5035460992907801, "acc_stderr": 0.02982674915328092, "acc_norm": 0.5035460992907801, "acc_norm_stderr": 0.02982674915328092 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.5280312907431551, "acc_stderr": 0.012750151802922447, "acc_norm": 0.5280312907431551, "acc_norm_stderr": 0.012750151802922447 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.7904411764705882, "acc_stderr": 0.02472311040767708, "acc_norm": 0.7904411764705882, "acc_norm_stderr": 0.02472311040767708 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.7483660130718954, "acc_stderr": 0.01755581809132227, "acc_norm": 0.7483660130718954, "acc_norm_stderr": 0.01755581809132227 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.7, "acc_stderr": 0.04389311454644287, "acc_norm": 0.7, "acc_norm_stderr": 0.04389311454644287 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7591836734693878, "acc_stderr": 0.02737294220178816, "acc_norm": 0.7591836734693878, "acc_norm_stderr": 0.02737294220178816 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8905472636815921, "acc_stderr": 0.022076326101824636, "acc_norm": 0.8905472636815921, "acc_norm_stderr": 0.022076326101824636 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.89, "acc_stderr": 0.03144660377352202, "acc_norm": 0.89, "acc_norm_stderr": 0.03144660377352202 }, "harness|hendrycksTest-virology|5": { "acc": 0.5301204819277109, "acc_stderr": 0.03885425420866767, "acc_norm": 0.5301204819277109, "acc_norm_stderr": 0.03885425420866767 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8596491228070176, "acc_stderr": 0.0266405825391332, "acc_norm": 0.8596491228070176, "acc_norm_stderr": 0.0266405825391332 }, "harness|truthfulqa:mc|0": { "mc1": 0.4479804161566707, "mc1_stderr": 0.017408513063422906, "mc2": 0.5946296550939176, "mc2_stderr": 0.01480851612780029 }, "harness|winogrande|5": { "acc": 0.823993685872139, "acc_stderr": 0.010703090882320708 }, "harness|gsm8k|5": { "acc": 0.5314632297194845, "acc_stderr": 0.013745189948450412 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
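To give a sense of how the per-task metrics above can be consumed, here is a small sketch that ranks tasks by accuracy. The `results` dict literal is a hand-abbreviated excerpt of the JSON shown under "Latest results" (the real dict holds roughly sixty task entries); the extraction logic is illustrative and not part of the original card.

```python
# Abbreviated excerpt of the results shown above; the full dict has ~60 task entries.
results = {
    "all": {"acc": 0.7010040167512416, "acc_norm": 0.705796779111703},
    "harness|arc:challenge|25": {"acc": 0.6493174061433447, "acc_norm": 0.6885665529010239},
    "harness|winogrande|5": {"acc": 0.823993685872139},
    "harness|gsm8k|5": {"acc": 0.5314632297194845},
}

# Rank the individual tasks by accuracy, skipping the aggregate "all" entry.
per_task = {name: metrics["acc"] for name, metrics in results.items() if name != "all"}
for name, acc in sorted(per_task.items(), key=lambda kv: kv[1], reverse=True):
    print(f"{name}: {acc:.4f}")
```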
## Dataset Details

### Dataset Description

<!-- Provide a longer summary of what this dataset is. -->

- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]

### Dataset Sources [optional]

<!-- Provide the basic links for the dataset. -->

- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]

## Uses

<!-- Address questions around how the dataset is intended to be used. -->

### Direct Use

<!-- This section describes suitable use cases for the dataset. -->

[More Information Needed]

### Out-of-Scope Use

<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->

[More Information Needed]

## Dataset Structure

<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->

[More Information Needed]

## Dataset Creation

### Curation Rationale

<!-- Motivation for the creation of this dataset. -->

[More Information Needed]

### Source Data

<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->

#### Data Collection and Processing

<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->

[More Information Needed]

#### Who are the source data producers?

<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->

[More Information Needed]

### Annotations [optional]

<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->

#### Annotation process

<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->

[More Information Needed]

#### Who are the annotators?

<!-- This section describes the people or systems who created the annotations. -->

[More Information Needed]

#### Personal and Sensitive Information

<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->

[More Information Needed]

## Bias, Risks, and Limitations

<!-- This section is meant to convey both technical and sociotechnical limitations. -->

[More Information Needed]

### Recommendations

<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->

Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.

## Citation [optional]

<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->

**BibTeX:**

[More Information Needed]

**APA:**

[More Information Needed]

## Glossary [optional]

<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->

[More Information Needed]

## More Information [optional]

[More Information Needed]

## Dataset Card Authors [optional]

[More Information Needed]

## Dataset Card Contact

[More Information Needed]
open-llm-leaderboard/details_JaeyeonKang__CCK_Gony_v0.2
[ "region:us" ]
2024-02-02T07:58:17+00:00
{"pretty_name": "Evaluation run of JaeyeonKang/CCK_Gony_v0.2", "dataset_summary": "Dataset automatically created during the evaluation run of model [JaeyeonKang/CCK_Gony_v0.2](https://huggingface.co/JaeyeonKang/CCK_Gony_v0.2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_JaeyeonKang__CCK_Gony_v0.2\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-02T07:55:59.515019](https://huggingface.co/datasets/open-llm-leaderboard/details_JaeyeonKang__CCK_Gony_v0.2/blob/main/results_2024-02-02T07-55-59.515019.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.7010040167512416,\n \"acc_stderr\": 0.03046871961713868,\n \"acc_norm\": 0.705796779111703,\n \"acc_norm_stderr\": 0.03105821273167017,\n \"mc1\": 0.4479804161566707,\n \"mc1_stderr\": 0.017408513063422906,\n \"mc2\": 0.5946296550939176,\n \"mc2_stderr\": 0.01480851612780029\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6493174061433447,\n \"acc_stderr\": 0.013944635930726099,\n \"acc_norm\": 0.6885665529010239,\n \"acc_norm_stderr\": 0.013532472099850939\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6690898227444733,\n \"acc_stderr\": 0.00469579134050288,\n \"acc_norm\": 0.8660625373431587,\n \"acc_norm_stderr\": 0.0033988905252296995\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252606,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252606\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6888888888888889,\n \"acc_stderr\": 0.03999262876617721,\n \"acc_norm\": 0.6888888888888889,\n \"acc_norm_stderr\": 0.03999262876617721\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.756578947368421,\n \"acc_stderr\": 0.034923496688842384,\n \"acc_norm\": 0.756578947368421,\n \"acc_norm_stderr\": 0.034923496688842384\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7622641509433963,\n \"acc_stderr\": 0.02619980880756193,\n \"acc_norm\": 0.7622641509433963,\n \"acc_norm_stderr\": 0.02619980880756193\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.8055555555555556,\n \"acc_stderr\": 0.03309615177059006,\n \"acc_norm\": 0.8055555555555556,\n \"acc_norm_stderr\": 0.03309615177059006\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.51,\n \"acc_stderr\": 
0.05024183937956912,\n \"acc_norm\": 0.51,\n \"acc_norm_stderr\": 0.05024183937956912\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.67,\n \"acc_stderr\": 0.047258156262526066,\n \"acc_norm\": 0.67,\n \"acc_norm_stderr\": 0.047258156262526066\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.44,\n \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.44,\n \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.7225433526011561,\n \"acc_stderr\": 0.03414014007044036,\n \"acc_norm\": 0.7225433526011561,\n \"acc_norm_stderr\": 0.03414014007044036\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.45098039215686275,\n \"acc_stderr\": 0.049512182523962625,\n \"acc_norm\": 0.45098039215686275,\n \"acc_norm_stderr\": 0.049512182523962625\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.8,\n \"acc_stderr\": 0.04020151261036846,\n \"acc_norm\": 0.8,\n \"acc_norm_stderr\": 0.04020151261036846\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.6127659574468085,\n \"acc_stderr\": 0.03184389265339526,\n \"acc_norm\": 0.6127659574468085,\n \"acc_norm_stderr\": 0.03184389265339526\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5614035087719298,\n \"acc_stderr\": 0.04668000738510455,\n \"acc_norm\": 0.5614035087719298,\n \"acc_norm_stderr\": 0.04668000738510455\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.6413793103448275,\n \"acc_stderr\": 0.039966295748767186,\n \"acc_norm\": 0.6413793103448275,\n \"acc_norm_stderr\": 0.039966295748767186\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.49206349206349204,\n \"acc_stderr\": 0.025748065871673286,\n \"acc_norm\": 0.49206349206349204,\n \"acc_norm_stderr\": 0.025748065871673286\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.48412698412698413,\n \"acc_stderr\": 0.04469881854072606,\n \"acc_norm\": 0.48412698412698413,\n \"acc_norm_stderr\": 0.04469881854072606\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.44,\n \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.44,\n \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.8516129032258064,\n \"acc_stderr\": 0.020222737554330378,\n \"acc_norm\": 0.8516129032258064,\n \"acc_norm_stderr\": 0.020222737554330378\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5665024630541872,\n \"acc_stderr\": 0.034867317274198714,\n \"acc_norm\": 0.5665024630541872,\n \"acc_norm_stderr\": 0.034867317274198714\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.76,\n \"acc_stderr\": 0.04292346959909281,\n \"acc_norm\": 0.76,\n \"acc_norm_stderr\": 0.04292346959909281\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7818181818181819,\n \"acc_stderr\": 0.03225078108306289,\n \"acc_norm\": 0.7818181818181819,\n \"acc_norm_stderr\": 0.03225078108306289\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8787878787878788,\n \"acc_stderr\": 0.023253157951942088,\n \"acc_norm\": 0.8787878787878788,\n \"acc_norm_stderr\": 0.023253157951942088\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9585492227979274,\n \"acc_stderr\": 0.01438543285747646,\n \"acc_norm\": 0.9585492227979274,\n \"acc_norm_stderr\": 0.01438543285747646\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.7153846153846154,\n \"acc_stderr\": 0.0228783227997063,\n \"acc_norm\": 0.7153846153846154,\n \"acc_norm_stderr\": 0.0228783227997063\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.4,\n \"acc_stderr\": 0.0298696050953169,\n \"acc_norm\": 0.4,\n \"acc_norm_stderr\": 0.0298696050953169\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.7773109243697479,\n \"acc_stderr\": 0.027025433498882392,\n \"acc_norm\": 0.7773109243697479,\n \"acc_norm_stderr\": 0.027025433498882392\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.4105960264900662,\n \"acc_stderr\": 0.040166895948499266,\n \"acc_norm\": 0.4105960264900662,\n \"acc_norm_stderr\": 0.040166895948499266\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8788990825688073,\n \"acc_stderr\": 0.013987618292389713,\n \"acc_norm\": 0.8788990825688073,\n \"acc_norm_stderr\": 0.013987618292389713\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5879629629629629,\n \"acc_stderr\": 0.03356787758160831,\n \"acc_norm\": 0.5879629629629629,\n \"acc_norm_stderr\": 0.03356787758160831\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8725490196078431,\n \"acc_stderr\": 0.02340553048084631,\n \"acc_norm\": 0.8725490196078431,\n \"acc_norm_stderr\": 0.02340553048084631\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8860759493670886,\n \"acc_stderr\": 0.020681745135884562,\n \"acc_norm\": 0.8860759493670886,\n \"acc_norm_stderr\": 0.020681745135884562\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.7130044843049327,\n \"acc_stderr\": 0.03036037971029195,\n \"acc_norm\": 0.7130044843049327,\n \"acc_norm_stderr\": 0.03036037971029195\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.8015267175572519,\n \"acc_stderr\": 0.0349814938546247,\n \"acc_norm\": 0.8015267175572519,\n \"acc_norm_stderr\": 0.0349814938546247\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8760330578512396,\n \"acc_stderr\": 0.030083098716035202,\n \"acc_norm\": 0.8760330578512396,\n \"acc_norm_stderr\": 0.030083098716035202\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7962962962962963,\n \"acc_stderr\": 0.038935425188248475,\n \"acc_norm\": 0.7962962962962963,\n \"acc_norm_stderr\": 0.038935425188248475\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.8282208588957055,\n \"acc_stderr\": 0.02963471727237103,\n \"acc_norm\": 0.8282208588957055,\n \"acc_norm_stderr\": 0.02963471727237103\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.5714285714285714,\n \"acc_stderr\": 0.04697113923010213,\n \"acc_norm\": 0.5714285714285714,\n \"acc_norm_stderr\": 0.04697113923010213\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8543689320388349,\n \"acc_stderr\": 0.034926064766237906,\n \"acc_norm\": 0.8543689320388349,\n \"acc_norm_stderr\": 0.034926064766237906\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.9230769230769231,\n \"acc_stderr\": 0.017456987872436186,\n \"acc_norm\": 0.9230769230769231,\n \"acc_norm_stderr\": 0.017456987872436186\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.78,\n \"acc_stderr\": 0.04163331998932261,\n \"acc_norm\": 0.78,\n \"acc_norm_stderr\": 0.04163331998932261\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8735632183908046,\n \"acc_stderr\": 
0.01188448890589553,\n \"acc_norm\": 0.8735632183908046,\n \"acc_norm_stderr\": 0.01188448890589553\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7716763005780347,\n \"acc_stderr\": 0.022598703804321628,\n \"acc_norm\": 0.7716763005780347,\n \"acc_norm_stderr\": 0.022598703804321628\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4893854748603352,\n \"acc_stderr\": 0.0167187329411921,\n \"acc_norm\": 0.4893854748603352,\n \"acc_norm_stderr\": 0.0167187329411921\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.826797385620915,\n \"acc_stderr\": 0.021668400256514272,\n \"acc_norm\": 0.826797385620915,\n \"acc_norm_stderr\": 0.021668400256514272\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7781350482315113,\n \"acc_stderr\": 0.023598858292863047,\n \"acc_norm\": 0.7781350482315113,\n \"acc_norm_stderr\": 0.023598858292863047\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7993827160493827,\n \"acc_stderr\": 0.02228231394977487,\n \"acc_norm\": 0.7993827160493827,\n \"acc_norm_stderr\": 0.02228231394977487\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.5035460992907801,\n \"acc_stderr\": 0.02982674915328092,\n \"acc_norm\": 0.5035460992907801,\n \"acc_norm_stderr\": 0.02982674915328092\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.5280312907431551,\n \"acc_stderr\": 0.012750151802922447,\n \"acc_norm\": 0.5280312907431551,\n \"acc_norm_stderr\": 0.012750151802922447\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.7904411764705882,\n \"acc_stderr\": 0.02472311040767708,\n \"acc_norm\": 0.7904411764705882,\n \"acc_norm_stderr\": 0.02472311040767708\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.7483660130718954,\n \"acc_stderr\": 0.01755581809132227,\n \"acc_norm\": 0.7483660130718954,\n \"acc_norm_stderr\": 0.01755581809132227\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.04389311454644287,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.04389311454644287\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7591836734693878,\n \"acc_stderr\": 0.02737294220178816,\n \"acc_norm\": 0.7591836734693878,\n \"acc_norm_stderr\": 0.02737294220178816\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8905472636815921,\n \"acc_stderr\": 0.022076326101824636,\n \"acc_norm\": 0.8905472636815921,\n \"acc_norm_stderr\": 0.022076326101824636\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.89,\n \"acc_stderr\": 0.03144660377352202,\n \"acc_norm\": 0.89,\n \"acc_norm_stderr\": 0.03144660377352202\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5301204819277109,\n \"acc_stderr\": 0.03885425420866767,\n \"acc_norm\": 0.5301204819277109,\n \"acc_norm_stderr\": 0.03885425420866767\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8596491228070176,\n \"acc_stderr\": 0.0266405825391332,\n \"acc_norm\": 0.8596491228070176,\n \"acc_norm_stderr\": 0.0266405825391332\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.4479804161566707,\n \"mc1_stderr\": 0.017408513063422906,\n \"mc2\": 0.5946296550939176,\n \"mc2_stderr\": 0.01480851612780029\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.823993685872139,\n \"acc_stderr\": 0.010703090882320708\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.5314632297194845,\n \"acc_stderr\": 0.013745189948450412\n }\n}\n```", "repo_url": 
"https://huggingface.co/JaeyeonKang/CCK_Gony_v0.2", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_02T07_55_59.515019", "path": ["**/details_harness|arc:challenge|25_2024-02-02T07-55-59.515019.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-02T07-55-59.515019.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_02T07_55_59.515019", "path": ["**/details_harness|gsm8k|5_2024-02-02T07-55-59.515019.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-02T07-55-59.515019.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_02T07_55_59.515019", "path": ["**/details_harness|hellaswag|10_2024-02-02T07-55-59.515019.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-02T07-55-59.515019.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_02T07_55_59.515019", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T07-55-59.515019.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T07-55-59.515019.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T07-55-59.515019.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T07-55-59.515019.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T07-55-59.515019.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T07-55-59.515019.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_02T07_55_59.515019", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T07-55-59.515019.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T07-55-59.515019.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_02T07_55_59.515019", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T07-55-59.515019.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T07-55-59.515019.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_02T07_55_59.515019", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T07-55-59.515019.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T07-55-59.515019.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_02T07_55_59.515019", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T07-55-59.515019.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T07-55-59.515019.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_02T07_55_59.515019", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T07-55-59.515019.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T07-55-59.515019.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_02T07_55_59.515019", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T07-55-59.515019.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T07-55-59.515019.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_02T07_55_59.515019", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T07-55-59.515019.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T07-55-59.515019.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_02_02T07_55_59.515019", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T07-55-59.515019.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T07-55-59.515019.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_02T07_55_59.515019", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T07-55-59.515019.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T07-55-59.515019.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_02T07_55_59.515019", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T07-55-59.515019.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T07-55-59.515019.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_02T07_55_59.515019", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T07-55-59.515019.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T07-55-59.515019.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_02T07_55_59.515019", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T07-55-59.515019.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T07-55-59.515019.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_02T07_55_59.515019", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T07-55-59.515019.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T07-55-59.515019.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_02T07_55_59.515019", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T07-55-59.515019.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T07-55-59.515019.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_02T07_55_59.515019", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T07-55-59.515019.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T07-55-59.515019.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_02T07_55_59.515019", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T07-55-59.515019.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T07-55-59.515019.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_02T07_55_59.515019", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T07-55-59.515019.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T07-55-59.515019.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_02T07_55_59.515019", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T07-55-59.515019.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T07-55-59.515019.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_02T07_55_59.515019", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T07-55-59.515019.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T07-55-59.515019.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_02T07_55_59.515019", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T07-55-59.515019.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T07-55-59.515019.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_02T07_55_59.515019", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T07-55-59.515019.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T07-55-59.515019.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_02T07_55_59.515019", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T07-55-59.515019.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T07-55-59.515019.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_02T07_55_59.515019", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T07-55-59.515019.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T07-55-59.515019.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_02T07_55_59.515019", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T07-55-59.515019.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T07-55-59.515019.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_02T07_55_59.515019", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T07-55-59.515019.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T07-55-59.515019.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_02T07_55_59.515019", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T07-55-59.515019.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T07-55-59.515019.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_02T07_55_59.515019", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T07-55-59.515019.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T07-55-59.515019.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_02T07_55_59.515019", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T07-55-59.515019.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T07-55-59.515019.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_02T07_55_59.515019", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T07-55-59.515019.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T07-55-59.515019.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_02T07_55_59.515019", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T07-55-59.515019.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T07-55-59.515019.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_02T07_55_59.515019", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T07-55-59.515019.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T07-55-59.515019.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_02T07_55_59.515019", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T07-55-59.515019.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T07-55-59.515019.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_02T07_55_59.515019", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T07-55-59.515019.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T07-55-59.515019.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_02T07_55_59.515019", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T07-55-59.515019.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T07-55-59.515019.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_02T07_55_59.515019", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T07-55-59.515019.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T07-55-59.515019.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_02T07_55_59.515019", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T07-55-59.515019.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T07-55-59.515019.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_02T07_55_59.515019", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T07-55-59.515019.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T07-55-59.515019.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_02_02T07_55_59.515019", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T07-55-59.515019.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T07-55-59.515019.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_02T07_55_59.515019", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T07-55-59.515019.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T07-55-59.515019.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_02T07_55_59.515019", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T07-55-59.515019.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T07-55-59.515019.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_02T07_55_59.515019", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T07-55-59.515019.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T07-55-59.515019.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_02T07_55_59.515019", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T07-55-59.515019.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T07-55-59.515019.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_02T07_55_59.515019", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T07-55-59.515019.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T07-55-59.515019.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_02T07_55_59.515019", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T07-55-59.515019.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T07-55-59.515019.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_02T07_55_59.515019", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T07-55-59.515019.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T07-55-59.515019.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_02T07_55_59.515019", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T07-55-59.515019.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T07-55-59.515019.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_02T07_55_59.515019", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T07-55-59.515019.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T07-55-59.515019.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_02T07_55_59.515019", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T07-55-59.515019.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T07-55-59.515019.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_02T07_55_59.515019", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T07-55-59.515019.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T07-55-59.515019.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_02T07_55_59.515019", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T07-55-59.515019.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T07-55-59.515019.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_02T07_55_59.515019", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T07-55-59.515019.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T07-55-59.515019.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_02T07_55_59.515019", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T07-55-59.515019.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T07-55-59.515019.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_02T07_55_59.515019", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T07-55-59.515019.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T07-55-59.515019.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_02T07_55_59.515019", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T07-55-59.515019.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T07-55-59.515019.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_02T07_55_59.515019", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T07-55-59.515019.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T07-55-59.515019.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_02T07_55_59.515019", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T07-55-59.515019.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T07-55-59.515019.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_02T07_55_59.515019", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T07-55-59.515019.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T07-55-59.515019.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_02T07_55_59.515019", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T07-55-59.515019.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T07-55-59.515019.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_02T07_55_59.515019", "path": ["**/details_harness|winogrande|5_2024-02-02T07-55-59.515019.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-02-02T07-55-59.515019.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_02_02T07_55_59.515019", "path": ["results_2024-02-02T07-55-59.515019.parquet"]}, {"split": "latest", "path": ["results_2024-02-02T07-55-59.515019.parquet"]}]}]}
2024-02-02T07:58:41+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of JaeyeonKang/CCK_Gony_v0.2 Dataset automatically created during the evaluation run of model JaeyeonKang/CCK_Gony_v0.2 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-02-02T07:55:59.515019 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of JaeyeonKang/CCK_Gony_v0.2\n\n\n\nDataset automatically created during the evaluation run of model JaeyeonKang/CCK_Gony_v0.2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T07:55:59.515019(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of JaeyeonKang/CCK_Gony_v0.2\n\n\n\nDataset automatically created during the evaluation run of model JaeyeonKang/CCK_Gony_v0.2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T07:55:59.515019(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
6787ef95abfd07a3777872419b15aa533cdda650
# Synthetic Search Queries : Russian

These are synthetic search queries generated with GPT-4 Turbo, based on [the given filters schema](https://huggingface.co/datasets/EmbeddingStudio/synthetic-search-filters-ru-raw) for the given business/service categories in the Russian language domain:
```
Artificial Intelligence and Machine Learning, Automotive, Automotive Dealerships, Banking Services, Books and Media, Cloud Computing Services, Cloud-based Development Environments, Collaborative Development Environments, Commercial Real Estate, Continuous Integration/Continuous Deployment, Credit Services, Customer Support Services, Customer Support and Feedback, Cybersecurity Software, Data Analytics and Business Intelligence, Dating Apps, Digital and Mobile Banking, Documentation and Knowledge Sharing, E-commerce Platforms, Eco-Friendly and Sustainable Properties, Educational Institutions, Electronics, Enterprise Software Development, Entertainment and Media Platforms, Event Planning Services, Fashion and Apparel, Financial Planning and Advisory, Food and Grocery, Game Development, Government Services, Health and Beauty, Healthcare Providers, Home and Garden, Image Stock Platforms, Insurance Services, International Real Estate, Internet of Things (IoT) Development, Investment Services, Issue Tracking and Bug Reporting, Job Recruitment Agencies, Land Sales and Acquisitions, Legal Services, Logistics and Supply Chain Management, Luxury and High-End Properties, Market Research Firms, Mobile App Development, Mortgage and Real Estate Services, Payment Processing, Pet Supplies, Professional Social Networks, Project Management Tools, Property Management, Real Estate Consulting, Real Estate Development, Real Estate Investment, Residential Real Estate, Restaurants and Food Delivery Services, Retail Stores (Online and Offline), Risk Management and Compliance, Social Networks, Sports and Outdoors, Task and Time Management, Taxation Services, Team Communication and Chat Tools, Telecommunication Companies, Toys and Games, Travel and Booking Agencies, Travelers and Consumers, User Interface/User Experience Design, Version Control Systems, Video Hosting and Portals, Web Development
```

## Column descriptions

* Query (type: str) - generated search query.
* category (type: str) - name of the related business / service category.
* Parsed (type: List[str]) - list of JSON-readable parsed values:
  * Name (type: str) - a name of a representation from the provided filters schema.
  * Type (type: str) - python-like types.
  * Value (type: Union[str, float, int]) - the parsed value itself; it may not be exactly present in a given query if the related filter is an enumeration.

## Generation strategy

We used synthetically generated query parsing instructions:
* We generated lists of possible filters for 72 company categories:
  * [Raw version of filters dataset](https://huggingface.co/datasets/EmbeddingStudio/synthetic-search-filters-ru-raw)
  * [Split by representations](https://huggingface.co/datasets/EmbeddingStudio/synthetic-search-filters-ru)
* We randomly selected up to 150 possible combinations (1-3 filters in each combination) of filters, such that each filter's representation appears at most twice.
* For a given category and combination we [generated](https://huggingface.co/datasets/EmbeddingStudio/synthetic-search-queries-ru) with GPT-4 Turbo:
  * 2 search queries and their parsed versions with unstructured parts.
  * 2 search queries and their parsed versions without an unstructured part.
* Using the filters, queries, and parsed versions we prepared [27.42k saiga-format instructions](https://huggingface.co/datasets/EmbeddingStudio/query-parsing-instructions-saiga)

**Warning:** the EmbeddingStudio team advises that the generated queries **have not been fully curated**; they will be curated later once we finish our product-market-fit stage.

We also used GPT-4 Turbo to generate the search queries and their parsed versions. The main principles were:
* If the passed schema doesn't contain a possible filter, do not generate the query or the filter.
* If a selected combination of representations contains an enumeration, we ask GPT-4 Turbo to map values in both the search query and the parsed version.
* If a selected combination of representations contains a pattern, we ask GPT-4 Turbo to stay aligned with that pattern.

## Train / test splitting principles

As we are trying to fine-tune an LLM to follow zero-shot query parsing instructions, we want to test:
* Ability to work well with an unseen domain
* Ability to work well with unseen filters
* Ability to work well with unseen queries

For these purposes we:
1. Put 5 categories into the test split, completely separated from train: `Automotive, Educational Institutions, Enterprise Software Development, Payment Processing, Professional Social Networks`.
2. Out of each company category appearing in train, set aside / removed one filter and the queries related to it.
3. Selected 5% of the other queries and put them into test.

## How to use it

```python
from datasets import load_dataset

search_queries = load_dataset('EmbeddingStudio/synthetic-search-queries-ru')
```
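For readers who want to consume the `Parsed` column programmatically, here is a minimal sketch; it assumes each JSON object exposes the `Name`/`Type`/`Value` keys listed in the column descriptions above, which should be verified against an actual row:

```python
import json

from datasets import load_dataset

# Load the dataset; column names follow the descriptions above.
search_queries = load_dataset('EmbeddingStudio/synthetic-search-queries-ru')

for row in search_queries['train'].select(range(3)):
    print('Query:   ', row['Query'])
    print('Category:', row['category'])
    # Each element of `Parsed` is a JSON-readable string; decode it to a dict.
    for raw in row['Parsed']:
        parsed = json.loads(raw)
        print('  filter:', parsed['Name'], '| type:', parsed['Type'], '| value:', parsed['Value'])
```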
EmbeddingStudio/synthetic-search-queries-ru
[ "task_categories:token-classification", "task_categories:text-generation", "size_categories:10K<n<100K", "language:ru", "license:apache-2.0", "synthetic", "search-queries", "e-commerce", "online-shops", "travel-agencies", "educational-institutions-ai", "job-recruitment-automation", "banking-digital-services", "investment-ai-analysis", "insurance-tech-innovation", "financial-advisory-ai", "credit-services-automation", "payment-processing-tech", "mortgage-tech-solutions", "real-estate-digital-solutions", "taxation-tech-services", "risk-management-ai", "compliance-automation", "digital-banking-innovation", "mobile-banking-tech", "online-retail-tech", "offline-retail-automation", "automotive-dealership-tech", "restaurant-automation-tech", "food-delivery-ai", "entertainment-platforms-ai", "media-platforms-tech", "government-services-automation", "travel-tech-innovation", "consumer-analytics-ai", "logistics-tech-automation", "supply-chain-ai", "customer-support-tech", "market-research-ai", "mobile-app-dev-tech", "game-dev-ai", "cloud-computing-services", "data-analytics-ai", "business-intelligence-ai", "cybersecurity-software-tech", "ui-ux-design-ai", "iot-development-tech", "project-management-tools-ai", "version-control-systems-tech", "ci-cd-automation", "issue-tracking-ai", "bug-reporting-automation", "collaborative-dev-environments", "team-communication-tech", "task-time-management-ai", "customer-feedback-ai", "cloud-based-dev-tech", "image-stock-platforms-ai", "video-hosting-tech", "social-networks-ai", "professional-social-networks-ai", "dating-apps-tech", "region:us" ]
2024-02-02T08:08:12+00:00
{"language": ["ru"], "license": "apache-2.0", "size_categories": ["10K<n<100K"], "task_categories": ["token-classification", "text-generation"], "pretty_name": "Synthetic Search Queries : Russian", "dataset_info": {"features": [{"name": "Query", "dtype": "string"}, {"name": "category", "dtype": "string"}, {"name": "Parsed", "sequence": "string"}], "splits": [{"name": "train", "num_bytes": 2745353, "num_examples": 8494}, {"name": "test", "num_bytes": 941011, "num_examples": 2731}], "download_size": 1082868, "dataset_size": 3686364}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "tags": ["synthetic", "search-queries", "e-commerce", "online-shops", "travel-agencies", "educational-institutions-ai", "job-recruitment-automation", "banking-digital-services", "investment-ai-analysis", "insurance-tech-innovation", "financial-advisory-ai", "credit-services-automation", "payment-processing-tech", "mortgage-tech-solutions", "real-estate-digital-solutions", "taxation-tech-services", "risk-management-ai", "compliance-automation", "digital-banking-innovation", "mobile-banking-tech", "online-retail-tech", "offline-retail-automation", "automotive-dealership-tech", "restaurant-automation-tech", "food-delivery-ai", "entertainment-platforms-ai", "media-platforms-tech", "government-services-automation", "travel-tech-innovation", "consumer-analytics-ai", "logistics-tech-automation", "supply-chain-ai", "customer-support-tech", "market-research-ai", "mobile-app-dev-tech", "game-dev-ai", "cloud-computing-services", "data-analytics-ai", "business-intelligence-ai", "cybersecurity-software-tech", "ui-ux-design-ai", "iot-development-tech", "project-management-tools-ai", "version-control-systems-tech", "ci-cd-automation", "issue-tracking-ai", "bug-reporting-automation", "collaborative-dev-environments", "team-communication-tech", "task-time-management-ai", "customer-feedback-ai", "cloud-based-dev-tech", "image-stock-platforms-ai", "video-hosting-tech", "social-networks-ai", "professional-social-networks-ai", "dating-apps-tech"]}
2024-02-02T11:42:25+00:00
[]
[ "ru" ]
TAGS #task_categories-token-classification #task_categories-text-generation #size_categories-10K<n<100K #language-Russian #license-apache-2.0 #synthetic #search-queries #e-commerce #online-shops #travel-agencies #educational-institutions-ai #job-recruitment-automation #banking-digital-services #investment-ai-analysis #insurance-tech-innovation #financial-advisory-ai #credit-services-automation #payment-processing-tech #mortgage-tech-solutions #real-estate-digital-solutions #taxation-tech-services #risk-management-ai #compliance-automation #digital-banking-innovation #mobile-banking-tech #online-retail-tech #offline-retail-automation #automotive-dealership-tech #restaurant-automation-tech #food-delivery-ai #entertainment-platforms-ai #media-platforms-tech #government-services-automation #travel-tech-innovation #consumer-analytics-ai #logistics-tech-automation #supply-chain-ai #customer-support-tech #market-research-ai #mobile-app-dev-tech #game-dev-ai #cloud-computing-services #data-analytics-ai #business-intelligence-ai #cybersecurity-software-tech #ui-ux-design-ai #iot-development-tech #project-management-tools-ai #version-control-systems-tech #ci-cd-automation #issue-tracking-ai #bug-reporting-automation #collaborative-dev-environments #team-communication-tech #task-time-management-ai #customer-feedback-ai #cloud-based-dev-tech #image-stock-platforms-ai #video-hosting-tech #social-networks-ai #professional-social-networks-ai #dating-apps-tech #region-us
# Synthetic Search Queries : Russian These are synthetic search queries generated with GPT-4 Turbo, based on the given filters schema for the given business/service categories in the Russian language domain: ## Column descriptions * Query (type: str) - generated search query. * category (type: str) - name of the related business / service category. * Parsed (type: List[str]) - list of JSON-readable parsed values: * Name (type: str) - a name of a representation from the provided filters schema. * Type (type: str) - python-like types. * Value (type: Union[str, float, int]) - the parsed value itself; it may not be exactly present in a given query if the related filter is an enumeration. ## Generation strategy We used synthetically generated query parsing instructions: * We generated lists of possible filters for 72 company categories: * Raw version of filters dataset * Split by representations * We randomly selected up to 150 possible combinations (1-3 filters in each combination) of filters, such that each filter's representation appears at most twice. * For a given category and combination we generated with GPT-4 Turbo: * 2 search queries and their parsed versions with unstructured parts. * 2 search queries and their parsed versions without an unstructured part. * Using the filters, queries, and parsed versions we prepared 27.42k saiga-format instructions Warning: the EmbeddingStudio team advises that the generated queries have not been fully curated; they will be curated later once we finish our product-market-fit stage We also used GPT-4 Turbo to generate the search queries and their parsed versions. The main principles were: * If the passed schema doesn't contain a possible filter, do not generate the query or the filter * If a selected combination of representations contains an enumeration, we ask GPT-4 Turbo to map values in both the search query and the parsed version. * If a selected combination of representations contains a pattern, we ask GPT-4 Turbo to stay aligned with that pattern ## Train / test splitting principles As we are trying to fine-tune an LLM to follow zero-shot query parsing instructions, we want to test: * Ability to work well with an unseen domain * Ability to work well with unseen filters * Ability to work well with unseen queries For these purposes we: 1. Put 5 categories into the test split, completely separated from train: 'Automotive, Educational Institutions, Enterprise Software Development, Payment Processing, Professional Social Networks'. 2. Out of each company category appearing in train, set aside / removed one filter and the queries related to it. 3. Selected 5% of the other queries and put them into test. ## How to use it
[ "# Synthetic Search Queries : Russian\n\nThis is generated with GPT-4 Turbo synthetic search queries, that based on the given filters schema for the given business/service categories for Russian language domain:", "## Column descriptions\n\n* Query (type: str) - generated search query.\n* category (type: str) - name of related business / service category.\n* Parsed (type: List[str]) - list of JSON readable parsed values:\n * Name (type: str) - a name of representation from provided filters schema.\n * Type (type: str) - python-like types.\n * Value (type: Union[str, float, int]) - parsed value itself, can be not exaclty present in a given query if related filter is an enumeration.", "## Generation strategy\n\nWe used synthetically generated query parsing instructions:\n* We generated lists of possible filters for 72 company categories: \n * Raw version of filters dataset\n * Split by representations\n* Select randomly up-to 150 possible combinations (1-3 filters in each combination) of filters, the way each filter's representation appears maximum twice.\n* For a given category and combination we generated with GPT-4 Turbo:\n * 2 search queries and theirs parsed version with unstructured parts.\n * 2 search queries and theirs parsed version without unstructured part. \n* Using filters, queries and parsed version we prepared 27.42k saiga format instruction\n\nWarning: EmbeddingStudio team aware you that generated queries weren't enough curated, and will be curated later once we finish our product market fit stage\n\nWe also used GPT-4 Turbo for generation of search queries and theirs parsed version. Main principles were: \n* If passed schema doesn't contain possible filter, do not generate query itself or a possible filter \n* If a selected representations combination contains enumeration, so we ask to map values in a search query and a parsed version.\n* If a selected representations combination contains pattern, so we ask GPT-4 Turbo to be aligned with a pattern", "## Train / test splitting principles\n\nAs we are trying to fine-tune LLM to follow zero-shot query parsing instructions, so we want to test:\n* Ability to work well with unseen domain\n* Ability to work well with unseen filters\n* Ability to work well with unseen queries\n\nFor these purposes we:\n1. We put into test split 5 categories, completely separared from train: 'Automotive, Educational Institutions, Enterprise Software Development, Payment Processing, Professional Social Networks'.\n2. Also out of each appearing in train company categories, we put aside / removed one filter and queries related to it.\n3. Selected 5% of other queries and put it into test.", "## How to use it" ]
[ "TAGS\n#task_categories-token-classification #task_categories-text-generation #size_categories-10K<n<100K #language-Russian #license-apache-2.0 #synthetic #search-queries #e-commerce #online-shops #travel-agencies #educational-institutions-ai #job-recruitment-automation #banking-digital-services #investment-ai-analysis #insurance-tech-innovation #financial-advisory-ai #credit-services-automation #payment-processing-tech #mortgage-tech-solutions #real-estate-digital-solutions #taxation-tech-services #risk-management-ai #compliance-automation #digital-banking-innovation #mobile-banking-tech #online-retail-tech #offline-retail-automation #automotive-dealership-tech #restaurant-automation-tech #food-delivery-ai #entertainment-platforms-ai #media-platforms-tech #government-services-automation #travel-tech-innovation #consumer-analytics-ai #logistics-tech-automation #supply-chain-ai #customer-support-tech #market-research-ai #mobile-app-dev-tech #game-dev-ai #cloud-computing-services #data-analytics-ai #business-intelligence-ai #cybersecurity-software-tech #ui-ux-design-ai #iot-development-tech #project-management-tools-ai #version-control-systems-tech #ci-cd-automation #issue-tracking-ai #bug-reporting-automation #collaborative-dev-environments #team-communication-tech #task-time-management-ai #customer-feedback-ai #cloud-based-dev-tech #image-stock-platforms-ai #video-hosting-tech #social-networks-ai #professional-social-networks-ai #dating-apps-tech #region-us \n", "# Synthetic Search Queries : Russian\n\nThis is generated with GPT-4 Turbo synthetic search queries, that based on the given filters schema for the given business/service categories for Russian language domain:", "## Column descriptions\n\n* Query (type: str) - generated search query.\n* category (type: str) - name of related business / service category.\n* Parsed (type: List[str]) - list of JSON readable parsed values:\n * Name (type: str) - a name of representation from provided filters schema.\n * Type (type: str) - python-like types.\n * Value (type: Union[str, float, int]) - parsed value itself, can be not exaclty present in a given query if related filter is an enumeration.", "## Generation strategy\n\nWe used synthetically generated query parsing instructions:\n* We generated lists of possible filters for 72 company categories: \n * Raw version of filters dataset\n * Split by representations\n* Select randomly up-to 150 possible combinations (1-3 filters in each combination) of filters, the way each filter's representation appears maximum twice.\n* For a given category and combination we generated with GPT-4 Turbo:\n * 2 search queries and theirs parsed version with unstructured parts.\n * 2 search queries and theirs parsed version without unstructured part. \n* Using filters, queries and parsed version we prepared 27.42k saiga format instruction\n\nWarning: EmbeddingStudio team aware you that generated queries weren't enough curated, and will be curated later once we finish our product market fit stage\n\nWe also used GPT-4 Turbo for generation of search queries and theirs parsed version. 
Main principles were: \n* If passed schema doesn't contain possible filter, do not generate query itself or a possible filter \n* If a selected representations combination contains enumeration, so we ask to map values in a search query and a parsed version.\n* If a selected representations combination contains pattern, so we ask GPT-4 Turbo to be aligned with a pattern", "## Train / test splitting principles\n\nAs we are trying to fine-tune LLM to follow zero-shot query parsing instructions, so we want to test:\n* Ability to work well with unseen domain\n* Ability to work well with unseen filters\n* Ability to work well with unseen queries\n\nFor these purposes we:\n1. We put into test split 5 categories, completely separared from train: 'Automotive, Educational Institutions, Enterprise Software Development, Payment Processing, Professional Social Networks'.\n2. Also out of each appearing in train company categories, we put aside / removed one filter and queries related to it.\n3. Selected 5% of other queries and put it into test.", "## How to use it" ]
f2861fa01f9d80c842bfa4cfc599927d45324608
Dataset used for the webshell classification task. The repo contains: * An open-source collection of php/asp/aspx/jsp webshells * An open-source collection of php/asp/aspx/jsp normal (benign) scripts All files are collected from GitHub.
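Since the repo ships raw script files rather than a packaged split, a minimal loading sketch might look like the following; the folder layout and any webshell-vs-normal labeling convention are assumptions that should be verified after download:

```python
import os

from huggingface_hub import snapshot_download

# Pull the whole dataset repo locally (it contains raw script files).
local_dir = snapshot_download(repo_id="c01dsnap/Webshell", repo_type="dataset")

# Collect candidate samples by extension; how webshells are separated from
# normal scripts (e.g., by top-level folder) is an assumption to confirm.
samples = []
for root, _, files in os.walk(local_dir):
    for name in files:
        if name.endswith((".php", ".asp", ".aspx", ".jsp")):
            samples.append(os.path.join(root, name))

print(f"Found {len(samples)} scripts")
```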
c01dsnap/Webshell
[ "license:apache-2.0", "region:us" ]
2024-02-02T08:08:20+00:00
{"license": "apache-2.0"}
2024-02-02T08:58:45+00:00
[]
[]
TAGS #license-apache-2.0 #region-us
Dataset used for the webshell classification task. The repo contains: * An open-source collection of php/asp/aspx/jsp webshells * An open-source collection of php/asp/aspx/jsp normal (benign) scripts All files are collected from GitHub.
[]
[ "TAGS\n#license-apache-2.0 #region-us \n" ]
3e6b20f7d777d27e4916c7cbbde8ab008b8c474e
We present the **Arxiv Figures & Tables Database (AFTdb)**, which consists of an aggregation of figures and tables from scientific articles sourced from the arXiv platform. The purpose of this dataset is to train multimodal models specialized in images of document-type objects (graphs, functional diagrams, tables, etc.), rather than photographic-type images. The idea is that a model trained on this type of data will be more coherent within the context of document corpora than a model trained on pictorial compositions. To establish a connection between the two modalities (image and text), captions for each object are also provided. As captions can sometimes be very brief, the article's summary is also included to add context to the document object if necessary. All textual data (titles, abstracts, and captions) are available in both English (original language) and French through translation using Google Translate. For this reason, a corpus of scientific articles was prioritized. Due to the scientific rigor demanded, each document-type object is systematically accompanied by a caption (similar to captions for pictorial images on platforms like Flickr, for example). The database is divided into two types of document objects: figures and tables. For the table part, it is possible to approach two different types of learning. The first, similar to figures, associates the image with the caption. However, in the data field, the LaTeX source code of the table is also provided. A second objective can then be to take the image of a table and convert it back into text using this source code. Loading the database -------------------- The figure part is relatively substantial, and it is advisable to use the dataset in streaming mode: ```python aftdb_figure = load_dataset("cmarkea/aftdb", "figure", streaming=True) ``` The table part is less substantial and can be downloaded locally directly: ```python aftdb_table = load_dataset("cmarkea/aftdb", "table") ``` Both categories are compatible, and it is possible to load both types simultaneously: ```python aftdb = load_dataset("cmarkea/aftdb", "figure+table", streaming=True) ``` This is the **default** configuration. Statistical Description ----------------------- The downloaded articles correspond to the portion of articles whose last modifications on the arXiv platform occurred in 2023. | Number of | Count | |------------------------------------------:|:----------| | articles | 22,893 | | authors | 90,165 | | figures (train) | 157,944 | | figures (test) | 3,579 | | tables (train) | 16,415 | | tables (test) | 395 | | total words in English titles | 234,072 | | total words in French titles | 308,187 | | total words in English abstracts | 3,879,940 | | total words in French abstracts | 4,536,101 | | total words in English captions | 7,689,270 | | total words in French captions | 8,513,199 | Here is the distribution of articles in the dataset by arXiv category. 
| category | Freq (%) | category | Freq (%) |
|-------------------:|:-----------|-------------------:|:-----------|
| cs.LG | 7.29594 | cs.AI | 3.88624 |
| cs.CV | 2.48066 | hep-ph | 2.12586 |
| astro-ph.SR | 2.01854 | astro-ph.GA | 1.85782 |
| stat.ME | 1.77373 | physics.flu-dyn | 1.71847 |
| cond-mat.stat-mech | 1.66027 | stat.ML | 1.64265 |
| eess.SP | 1.63971 | cs.CL | 1.4838 |
| astro-ph.HE | 1.48087 | hep-ex | 1.43361 |
| astro-ph.IM | 1.43014 | physics.comp-ph | 1.39464 |
| nucl-th | 1.3925 | math.NA | 1.36794 |
| hep-th | 1.30467 | physics.optics | 1.28037 |
| astro-ph.EP | 1.19494 | cond-mat.mtrl-sci | 1.18373 |
| cs.SY | 1.17305 | eess.SY | 1.16131 |
| stat.AP | 1.14369 | cs.IT | 1.14022 |
| math.IT | 1.14022 | physics.ins-det | 1.1258 |
| gr-qc | 1.10845 | cs.RO | 1.10765 |
| cond-mat.soft | 1.05425 | cond-mat.mes-hall | 1.04277 |
| astro-ph.CO | 1.03743 | math.OC | 1.01047 |
| cs.CR | 0.994986 | cond-mat.str-el | 0.984041 |
| cs.DC | 0.972294 | physics.chem-ph | 0.95681 |
| cond-mat.dis-nn | 0.947199 | cs.NI | 0.941593 |
| cond-mat.quant-gas | 0.880191 | physics.atom-ph | 0.878322 |
| cs.CE | 0.874851 | hep-lat | 0.837476 |
| cs.NE | 0.836141 | cs.SI | 0.830001 |
| math.DS | 0.821992 | eess.AS | 0.813716 |
| nucl-ex | 0.810512 | math-ph | 0.808376 |
| cs.HC | 0.784616 | cs.MM | 0.709065 |
| physics.app-ph | 0.695182 | cs.SD | 0.694915 |
| physics.plasm-ph | 0.694381 | cs.MA | 0.693847 |
| math.ST | 0.682101 | quant-ph | 2.53645 |
| stat.TH | 0.682101 | physics.bio-ph | 0.650332 |
| eess.IV | 0.650065 | physics.soc-ph | 0.649531 |
| cs.GR | 0.633513 | cs.IR | 0.620965 |
| cs.DB | 0.620165 | cs.CY | 0.596404 |
| cs.AR | 0.576115 | math.GT | 0.555025 |
| q-bio.QM | 0.545948 | physics.data-an | 0.543812 |
| math.CO | 0.535269 | math.PR | 0.51845 |
| physics.ao-ph | 0.515246 | nlin.CD | 0.496559 |
| stat.CO | 0.49202 | q-bio.PE | 0.474934 |
| cond-mat.supr-con | 0.454378 | q-bio.NC | 0.453577 |
| cs.GT | 0.445301 | econ.GN | 0.429283 |
| cs.SE | 0.423143 | econ.GN | 0.429283 |
| cs.ET | 0.419405 | physics.space-ph | 0.394577 |
| nlin.PS | 0.368949 | cs.PF | 0.345188 |
| physics.acc-ph | 0.335845 | cond-mat.other | 0.331573 |
| econ.EM | 0.328903 | physics.med-ph | 0.320361 |
| cs.DM | 0.304876 | math.AP | 0.294198 |
| nlin.AO | 0.256555 | q-bio.BM | 0.235198 |
| q-fin.CP | 0.223184 | math.AT | 0.198624 |
| cs.PL | 0.192483 | physics.class-ph | 0.18661 |
| math.DG | 0.184741 | q-fin.ST | 0.181538 |
| cs.LO | 0.17433 | cs.CC | 0.153506 |
| cs.DL | 0.143895 | q-fin.TR | 0.136954 |
| math.MG | 0.135352 | math.AG | 0.134818 |
| q-fin.MF | 0.131615 | q-bio.TO | 0.126809 |
| q-bio.GN | 0.120936 | math.SG | 0.118266 |
| math.GR | 0.116665 | math.CA | 0.116398 |
| math.CV | 0.116398 | cs.MS | 0.110524 |
| math.HO | 0.106253 | nlin.SI | 0.104918 |
| math.RT | 0.100113 | cs.FL | 0.0995787 |
| q-fin.PM | 0.097176 | econ.TH | 0.0955742 |
| math.SP | 0.0880991 | q-fin.GN | 0.0875652 |
| q-fin.RM | 0.0859634 | physics.ed-ph | 0.0819589 |
| math.QA | 0.0787553 | q-bio.CB | 0.0752847 |
| nlin.CG | 0.072882 | physics.atm-clus | 0.072615 |
| math.NT | 0.0720811 | math.FA | 0.0712802 |
| q-bio.MN | 0.0707463 | physics.pop-ph | 0.064873 |
| q-fin.PR | 0.0635382 | stat.OT | 0.0619364 |
| cs.OS | 0.0544613 | cs.SC | 0.0467192 |
| physics.gen-ph | 0.0461853 | physics.hist-ph | 0.0429817 |
| math.AC | 0.0379093 | q-bio.SC | 0.0331039 |
| math.CT | 0.0309682 | math.RA | 0.0304342 |
| math.GN | 0.0274976 | math.LO | 0.0261628 |
| cs.OH | 0.0248279 | math.GM | 0.0168189 |
| math.OA | 0.016552 | cs.GL | 0.0114796 |
| math.KT | 0.00694114 | q-bio.OT | 0.00186877 |

Field Descriptions
------------------

- **id:** Unique identifier for each observation.
- **paper_id:** Unique arXiv identifier for each article.
- **type:** 'figure' for graphic objects such as graphs, functional diagrams, etc., and 'table' for tables.
- **authors:** Names of the article's authors.
- **categories:** arXiv categories of the article.
- **title:** Title of the article.
- **summary:** Article summary.
- **caption:** Caption of the document-type object.
- **image:** Pillow image of the document-type object.
- **data:** For figures, it represents the filename of the figure; for tables, it is the LaTeX transcription of the table.
- **newcommands:** List containing the LaTeX `newcommands` used in the article.

Citation
--------

```bibtex
@online{DeAFTdb,
  AUTHOR = {Cyrile Delestre},
  URL = {https://huggingface.co/datasets/cmarkea/aftdb},
  YEAR = {2024},
  KEYWORDS = {NLP ; Multimodal}
}
```
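To make the field descriptions concrete, here is a minimal sketch of reading one example from each configuration; it only assumes the column names listed above and the `train` split named in the statistics table:

```python
from datasets import load_dataset

# Figure part: large, so stream it and inspect a single example.
figures = load_dataset("cmarkea/aftdb", "figure", streaming=True)
fig = next(iter(figures["train"]))
print(fig["title"])
print(fig["caption"])
fig["image"].save("figure_example.png")  # `image` is a Pillow image

# Table part: small enough to download locally.
tables = load_dataset("cmarkea/aftdb", "table")
tab = tables["train"][0]
print(tab["caption"])
print(tab["data"])  # LaTeX transcription of the table
```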
cmarkea/aftdb
[ "task_categories:text-generation", "task_categories:text-to-image", "task_categories:image-to-text", "language:fr", "language:en", "license:apache-2.0", "arXiv", "multimodal", "document-type objects", "region:us" ]
2024-02-02T08:10:29+00:00
{"language": ["fr", "en"], "license": "apache-2.0", "task_categories": ["text-generation", "text-to-image", "image-to-text"], "tags": ["arXiv", "multimodal", "document-type objects"]}
2024-02-11T11:28:39+00:00
[]
[ "fr", "en" ]
TAGS #task_categories-text-generation #task_categories-text-to-image #task_categories-image-to-text #language-French #language-English #license-apache-2.0 #arXiv #multimodal #document-type objects #region-us
We present the Arxiv Figures & Tables Database (AFTdb), which consists of an aggregation of figures and tables from scientific articles sourced from the arXiv platform. The purpose of this dataset is to train multimodal models specialized in images of document-type objects (graphs, functional diagrams, tables, etc.), rather than photographic-type images. The idea is that a model trained on this type of data will be more coherent within the context of document corpora than a model trained on pictorial compositions. To establish a connection between the two modalities (image and text), captions for each object are also provided. As captions can sometimes be very brief, the article's summary is also included to add context to the document object if necessary. All textual data (titles, abstracts, and captions) are available in both English (original language) and French through translation using Google Translate. For this reason, a corpus of scientific articles was prioritized. Due to the scientific rigor demanded, each document-type object is systematically accompanied by a caption (similar to captions for pictorial images on platforms like Flickr, for example). The database is divided into two types of document objects: figures and tables. For the table part, it is possible to approach two different types of learning. The first, similar to figures, associates the image with the caption. However, in the data field, the LaTeX source code of the table is also provided. A second objective can then be to take the image of a table and convert it back into text using this source code. Loading the database -------------------- The figure part is relatively substantial, and it is advisable to use the dataset in streaming mode: The table part is less substantial and can be downloaded locally directly: Both categories are compatible, and it is possible to load both types simultaneously: This is the default configuration. Statistical Description ----------------------- The downloaded articles correspond to the portion of articles whose last modifications on the arXiv platform occurred in 2023. Here is the distribution of articles in the dataset by arXiv category. Field Descriptions ------------------ * id: Unique identifier for each observation. * paper\_id: Unique arXiv identifier for each article. * type: 'figure' for graphic objects such as graphs, functional diagrams, etc., and 'table' for tables. * authors: Names of the article's authors. * categories: arXiv categories of the article. * title: Title of the article. * summary: Article summary. * caption: Caption of the document-type object. * image: Pillow image of the document-type object. * data: For figures, it represents the filename of the figure; for tables, it is the LaTeX transcription of the table. * newcommands: List containing the LaTeX 'newcommands' used in the article. Citation --------
[]
[ "TAGS\n#task_categories-text-generation #task_categories-text-to-image #task_categories-image-to-text #language-French #language-English #license-apache-2.0 #arXiv #multimodal #document-type objects #region-us \n" ]
294f8945fa888f80261de3be6dd95d48902102c1
# Dataset Card for [TED2020-TW-Corpus] ## Table of Contents - [Table of Contents](#table-of-contents) - [Dataset Description](#dataset-description) - [Dataset Summary](#dataset-summary) - [Supported Tasks and Leaderboards](#supported-tasks-and-leaderboards) - [Languages](#languages) - [Dataset Structure](#dataset-structure) - [Data Instances](#data-instances) - [Data Fields](#data-fields) - [Data Splits](#data-splits) - [Dataset Creation](#dataset-creation) - [Curation Rationale](#curation-rationale) - [Source Data](#source-data) - [Annotations](#annotations) - [Personal and Sensitive Information](#personal-and-sensitive-information) - [Considerations for Using the Data](#considerations-for-using-the-data) - [Social Impact of Dataset](#social-impact-of-dataset) - [Discussion of Biases](#discussion-of-biases) - [Other Known Limitations](#other-known-limitations) - [Additional Information](#additional-information) - [Dataset Curators](#dataset-curators) - [Licensing Information](#licensing-information) - [Citation Information](#citation-information) - [Contributions](#contributions) ## Dataset Description - **Homepage:** - **Repository:** - **Paper:** - **Leaderboard:** - **Point of Contact:** [Heng-Shiou Sheu](mailto:[email protected]) ### Dataset Summary TED2020 is a multilingual dataset for machine translation benchmarking, derived from the user-contributed translations collected by [OPUS](https://opus.nlpl.eu/TED2020/corpus/version/TED2020) and compiled by [OPUS](https://opus.nlpl.eu/). The dataset includes test and development data sorted by language pair. It provides test sets for hundreds of language pairs and is continuously updated; please check the version number tag to cite the version you are using. TED2020 collects talks given between 1984 and 2020, covering a wide range of topics including science, technology, art, education, the environment, and social issues. The dataset is a very valuable resource for studying and analyzing speakers' presentation styles, shifts in topics, and audience reactions. ### Supported Tasks and Leaderboards ### Languages This dataset covers hundreds of languages and language pairs, organized by ISO-639-3 language codes. The current version covers the following languages: Traditional Chinese, English, Japanese, Korean, Indonesian, Vietnamese, and Thai. ## Dataset Structure ### Data Instances The data is stored as comma-separated file content with three fields: instruction, input, and output. Note that we do not imply a translation direction; the dataset is considered symmetric and serves as a test set in both directions. ### Data Splits Only the Train split has been prepared so far. ## Dataset Creation ### Curation Rationale This dataset will be continuously updated and will be publicly released on GitHub in the future. High language coverage is the main goal of this project, and the dataset is prepared consistently and systematically with standardized language labels and distribution formats. ### Source Data #### Initial Data Collection and Normalization The TED2020 dataset was collected from user-contributed translations submitted to [OPUS - TED2020](https://opus.nlpl.eu/TED2020/corpus/version/TED2020) and compiled into the multi-parallel corpus in [OPUS](https://opus.nlpl.eu). #### Who are the source language producers? These transcripts have been translated into more than 100 languages by a global community of volunteers. The parallel corpus and its validation code are available from [TED](https://www.ted.com/participate/translate) and are managed by the University of Helsinki and its [language_technology_research group](https://blogs.helsinki.fi/language-technology/). The data and tools for creating and using the resource are [open source](https://github.com/Helsinki-NLP/Tatoeba-Challenge/) and will serve, as part of the [OPUS ecosystem](https://opus.nlpl.eu/), for parallel data and machine translation research. ### Personal and Sensitive Information For information about the handling of personal and sensitive information, please consult the [original providers](https://opus.nlpl.eu/TED2020/corpus/version/TED2020) of the data. The dataset has not been processed in any way to detect or remove potentially sensitive or personal information. ### Social Impact of Dataset The language coverage is high, so the dataset represents a very valuable resource for machine translation development, especially for lower-resource languages and language pairs. The continuously growing database is also a dynamic resource whose value will keep growing. ### Other Known Limitations The sentences are typically short and therefore easy to translate. For high-resource languages, this makes the results less useful than more challenging benchmarks. For lower-resource language pairs, the limited complexity of the examples is actually a good thing for measuring progress, even in very challenging settings. ### Dataset Curators This dataset was created by Heng-Shiou Sheu. ### Licensing Information These datasets are subject to the [TED Talks Usage Policy](https://www.ted.com/about/our-organization/our-policies-terms/ted-talks-usage-policy). Details about the terms of use of the original dataset are listed [here](https://www.ted.com/about/our-organization/our-policies-terms/ted-talks-usage-policy). ### Citation Information ``` @inproceedings{Heng666/TED2020-TW-Corpus, title={Taiwanese Phrases Multilingual Translation Dataset from TED2020 Talks}, author={Heng-Shiou Sheu}, year={2024}, url={https://huggingface.co/datasets/Heng666/TED2020-TW-Corpus}, } ```
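The card does not include a loading snippet; here is a minimal sketch, using the `en-zh_tw` configuration name and the `instruction`/`input`/`output` columns declared in this repository's metadata:

```python
from datasets import load_dataset

# Load the English <-> Traditional Chinese pair; the other configs declared in
# the metadata are id-zh_tw, ja-zh_tw, ko-zh_tw, th-zh_tw and vi-zh_tw.
ted = load_dataset("Heng666/TED2020-TW-Corpus", "en-zh_tw")

example = ted["train"][0]
print(example["instruction"])
print(example["input"])
print(example["output"])
```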
Heng666/TED2020-TW-Corpus
[ "task_categories:translation", "size_categories:10M<n<100M", "language:en", "language:ja", "language:ko", "language:id", "language:vi", "language:th", "language:tw", "license:unknown", "taiwan", "translation", "Ted2020", "region:us" ]
2024-02-02T08:21:07+00:00
{"language": ["en", "ja", "ko", "id", "vi", "th", "tw"], "license": "unknown", "size_categories": ["10M<n<100M"], "task_categories": ["translation"], "pretty_name": "TED2020-TW-Corpus", "dataset_info": [{"config_name": "en-zh_tw", "features": [{"name": "instruction", "dtype": "string"}, {"name": "input", "dtype": "string"}, {"name": "output", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 105192098, "num_examples": 394054}], "download_size": 50558276, "dataset_size": 105192098}, {"config_name": "id-zh_tw", "features": [{"name": "instruction", "dtype": "string"}, {"name": "input", "dtype": "string"}, {"name": "output", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 42245033, "num_examples": 153365}], "download_size": 19374788, "dataset_size": 42245033}, {"config_name": "ja-zh_tw", "features": [{"name": "instruction", "dtype": "string"}, {"name": "input", "dtype": "string"}, {"name": "output", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 101069421, "num_examples": 351078}], "download_size": 47707306, "dataset_size": 101069421}, {"config_name": "ko-zh_tw", "features": [{"name": "instruction", "dtype": "string"}, {"name": "input", "dtype": "string"}, {"name": "output", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 110871742, "num_examples": 374075}], "download_size": 53243063, "dataset_size": 110871742}, {"config_name": "th-zh_tw", "features": [{"name": "instruction", "dtype": "string"}, {"name": "input", "dtype": "string"}, {"name": "output", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 64742729, "num_examples": 156328}], "download_size": 25868969, "dataset_size": 64742729}, {"config_name": "vi-zh_tw", "features": [{"name": "instruction", "dtype": "string"}, {"name": "input", "dtype": "string"}, {"name": "output", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 95714104, "num_examples": 314214}], "download_size": 43462345, "dataset_size": 95714104}], "configs": [{"config_name": "en-zh_tw", "data_files": [{"split": "train", "path": "en-zh_tw/train-*"}]}, {"config_name": "id-zh_tw", "data_files": [{"split": "train", "path": "id-zh_tw/train-*"}]}, {"config_name": "ja-zh_tw", "data_files": [{"split": "train", "path": "ja-zh_tw/train-*"}]}, {"config_name": "ko-zh_tw", "data_files": [{"split": "train", "path": "ko-zh_tw/train-*"}]}, {"config_name": "th-zh_tw", "data_files": [{"split": "train", "path": "th-zh_tw/train-*"}]}, {"config_name": "vi-zh_tw", "data_files": [{"split": "train", "path": "vi-zh_tw/train-*"}]}], "viewer": true, "tags": ["taiwan", "translation", "Ted2020"]}
2024-02-02T08:53:00+00:00
[]
[ "en", "ja", "ko", "id", "vi", "th", "tw" ]
TAGS #task_categories-translation #size_categories-10M<n<100M #language-English #language-Japanese #language-Korean #language-Indonesian #language-Vietnamese #language-Thai #language-Twi #license-unknown #taiwan #translation #Ted2020 #region-us
# Dataset Card for [TED2020-TW-Corpus] ## Table of Contents - Table of Contents - Dataset Description - Dataset Summary - Supported Tasks and Leaderboards - Languages - Dataset Structure - Data Instances - Data Fields - Data Splits - Dataset Creation - Curation Rationale - Source Data - Annotations - Personal and Sensitive Information - Considerations for Using the Data - Social Impact of Dataset - Discussion of Biases - Other Known Limitations - Additional Information - Dataset Curators - Licensing Information - Citation Information - Contributions ## Dataset Description - Homepage: - Repository: - Paper: - Leaderboard: - Point of Contact: Heng-Shiou Sheu ### Dataset Summary TED2020 is a multilingual dataset for machine translation benchmarking, derived from the user-contributed translations collected by OPUS and compiled by OPUS. The dataset includes test and development data sorted by language pair. It provides test sets for hundreds of language pairs and is continuously updated; please check the version number tag to cite the version you are using. TED2020 collects talks given between 1984 and 2020, covering a wide range of topics including science, technology, art, education, the environment, and social issues. The dataset is a very valuable resource for studying and analyzing speakers' presentation styles, shifts in topics, and audience reactions. ### Supported Tasks and Leaderboards ### Languages This dataset covers hundreds of languages and language pairs, organized by ISO-639-3 language codes. The current version covers the following languages: Traditional Chinese, English, Japanese, Korean, Indonesian, Vietnamese, and Thai. ## Dataset Structure ### Data Instances The data is stored as comma-separated file content with three fields: instruction, input, and output. Note that we do not imply a translation direction; the dataset is considered symmetric and serves as a test set in both directions. ### Data Splits Only the Train split has been prepared so far. ## Dataset Creation ### Curation Rationale This dataset will be continuously updated and will be publicly released on GitHub in the future. High language coverage is the main goal of this project, and the dataset is prepared consistently and systematically with standardized language labels and distribution formats. ### Source Data #### Initial Data Collection and Normalization The TED2020 dataset was collected from user-contributed translations submitted to OPUS - TED2020 and compiled into the multi-parallel corpus in OPUS. #### Who are the source language producers? These transcripts have been translated into more than 100 languages by a global community of volunteers. The parallel corpus and its validation code are available from TED and are managed by the University of Helsinki and its language_technology_research group. The data and tools for creating and using the resource are open source and will serve, as part of the OPUS ecosystem, for parallel data and machine translation research. ### Personal and Sensitive Information For information about the handling of personal and sensitive information, please consult the original providers of the data. The dataset has not been processed in any way to detect or remove potentially sensitive or personal information. ### Social Impact of Dataset The language coverage is high, so the dataset represents a very valuable resource for machine translation development, especially for lower-resource languages and language pairs. The continuously growing database is also a dynamic resource whose value will keep growing. ### Other Known Limitations The sentences are typically short and therefore easy to translate. For high-resource languages, this makes the results less useful than more challenging benchmarks. For lower-resource language pairs, the limited complexity of the examples is actually a good thing for measuring progress, even in very challenging settings. ### Dataset Curators This dataset was created by Heng-Shiou Sheu. ### Licensing Information These datasets are subject to the TED Talks Usage Policy. Details about the terms of use of the original dataset are listed here.
[ "# Dataset Card for [TED2020-TW-Corpus]", "## Table of Contents\n- Table of Contents\n- Dataset Description\n - Dataset Summary\n - Supported Tasks and Leaderboards\n - Languages\n- Dataset Structure\n - Data Instances\n - Data Fields\n - Data Splits\n- Dataset Creation\n - Curation Rationale\n - Source Data\n - Annotations\n - Personal and Sensitive Information\n- Considerations for Using the Data\n - Social Impact of Dataset\n - Discussion of Biases\n - Other Known Limitations\n- Additional Information\n - Dataset Curators\n - Licensing Information\n - Citation Information\n - Contributions", "## Dataset Description\n- Homepage: \n- Repository: \n- Paper: \n- Leaderboard:\n- Point of Contact: Heng-Shiou Sheu", "### Dataset Summary\nTED2020 是一個機器翻譯基準的多語言資料集,源自 OPUS 收集的使用者貢獻的翻譯,並由 OPUS。該資料集包括按語言對排序的測試和開發資料。它包括數百種語言對的測試集,並且不斷更新。請檢查版本號標籤以引用您正在使用的版本。\nTED2020 收集了從1984年到2020年的演講,涵蓋了各種主題,包括科學、技術、藝術、教育、環境、社會問題等。該資料集是一個非常有價值的資源,可以用於研究和分析演講者的演講風格、主題的變化以及觀眾的反應。", "### Supported Tasks and Leaderboards", "### Languages\n此資料集涵蓋數百種語言和語言對,並按 ISO-639-3 語言組織。目前版本涵蓋以下語言。繁體中文、英文、日文、韓文、印尼文、越南文、泰文", "## Dataset Structure", "### Data Instances\n\n資料以 , 分隔檔案中內容,具有三個欄位:指示、輸入和輸出。請注意,我們並不暗示平移方向,並認為資料集是對稱的並用作兩個方向的測試集。", "### Data Splits\n先整理出 Train 資料。", "## Dataset Creation", "### Curation Rationale\n本資料集將持續更新,未來將公開發佈於 Github 當中。高語言覆蓋率是本計畫的主要目標,資料集的準備與標準化語言標籤和分發格式保持一致和系統化。", "### Source Data", "#### Initial Data Collection and Normalization\nTED2020 資料集是從提交到OPUS - TED2020 的使用者貢獻的翻譯中收集的,並編譯成OPUS 中的多並行語料庫)。", "#### Who are the source language producers?\n這些轉錄本已由全球志工社群翻譯為超過 100 種語言。平行語料庫及其驗證程式碼可從TED取得\nUniversity of Helsinki及其language_technology_research group 管理。用於創建和使用資源的數據和工具是開源,並將作為OPUS生態系統 用於平行資料和機器翻譯研究。", "### Personal and Sensitive Information\n有關處理個人資訊和敏感資訊的信息,我們請諮詢資料的原始提供者。該資料集未經過任何方式處理以檢測或刪除潛在的敏感資訊或個人資訊。", "### Social Impact of Dataset\n語言覆蓋率很高,因此它代表了機器翻譯開發的非常有價值的資源,特別是對於資源較少的語言和語言對。不斷成長的資料庫也代表著一種動態資源,其價值將進一步成長。", "### Other Known Limitations\n這些句子通常很短,因此很容易翻譯。對於高資源語言,這會導致結果不如更具挑戰性的基準有用。對於資源較少的語言對來說,即使在非常具有挑戰性的設定中,範例的有限複雜性實際上也是衡量進度的一件好事。", "### Dataset Curators\n此資料集由Heng-Shiou Sheu 製作。", "### Licensing Information\n這些資料集使用 TED Talks Usage Policy 。有關原始資料集使用條款的詳細資訊列於此處。" ]
[ "TAGS\n#task_categories-translation #size_categories-10M<n<100M #language-English #language-Japanese #language-Korean #language-Indonesian #language-Vietnamese #language-Thai #language-Twi #license-unknown #taiwan #translation #Ted2020 #region-us \n", "# Dataset Card for [TED2020-TW-Corpus]", "## Table of Contents\n- Table of Contents\n- Dataset Description\n - Dataset Summary\n - Supported Tasks and Leaderboards\n - Languages\n- Dataset Structure\n - Data Instances\n - Data Fields\n - Data Splits\n- Dataset Creation\n - Curation Rationale\n - Source Data\n - Annotations\n - Personal and Sensitive Information\n- Considerations for Using the Data\n - Social Impact of Dataset\n - Discussion of Biases\n - Other Known Limitations\n- Additional Information\n - Dataset Curators\n - Licensing Information\n - Citation Information\n - Contributions", "## Dataset Description\n- Homepage: \n- Repository: \n- Paper: \n- Leaderboard:\n- Point of Contact: Heng-Shiou Sheu", "### Dataset Summary\nTED2020 是一個機器翻譯基準的多語言資料集,源自 OPUS 收集的使用者貢獻的翻譯,並由 OPUS。該資料集包括按語言對排序的測試和開發資料。它包括數百種語言對的測試集,並且不斷更新。請檢查版本號標籤以引用您正在使用的版本。\nTED2020 收集了從1984年到2020年的演講,涵蓋了各種主題,包括科學、技術、藝術、教育、環境、社會問題等。該資料集是一個非常有價值的資源,可以用於研究和分析演講者的演講風格、主題的變化以及觀眾的反應。", "### Supported Tasks and Leaderboards", "### Languages\n此資料集涵蓋數百種語言和語言對,並按 ISO-639-3 語言組織。目前版本涵蓋以下語言。繁體中文、英文、日文、韓文、印尼文、越南文、泰文", "## Dataset Structure", "### Data Instances\n\n資料以 , 分隔檔案中內容,具有三個欄位:指示、輸入和輸出。請注意,我們並不暗示平移方向,並認為資料集是對稱的並用作兩個方向的測試集。", "### Data Splits\n先整理出 Train 資料。", "## Dataset Creation", "### Curation Rationale\n本資料集將持續更新,未來將公開發佈於 Github 當中。高語言覆蓋率是本計畫的主要目標,資料集的準備與標準化語言標籤和分發格式保持一致和系統化。", "### Source Data", "#### Initial Data Collection and Normalization\nTED2020 資料集是從提交到OPUS - TED2020 的使用者貢獻的翻譯中收集的,並編譯成OPUS 中的多並行語料庫)。", "#### Who are the source language producers?\n這些轉錄本已由全球志工社群翻譯為超過 100 種語言。平行語料庫及其驗證程式碼可從TED取得\nUniversity of Helsinki及其language_technology_research group 管理。用於創建和使用資源的數據和工具是開源,並將作為OPUS生態系統 用於平行資料和機器翻譯研究。", "### Personal and Sensitive Information\n有關處理個人資訊和敏感資訊的信息,我們請諮詢資料的原始提供者。該資料集未經過任何方式處理以檢測或刪除潛在的敏感資訊或個人資訊。", "### Social Impact of Dataset\n語言覆蓋率很高,因此它代表了機器翻譯開發的非常有價值的資源,特別是對於資源較少的語言和語言對。不斷成長的資料庫也代表著一種動態資源,其價值將進一步成長。", "### Other Known Limitations\n這些句子通常很短,因此很容易翻譯。對於高資源語言,這會導致結果不如更具挑戰性的基準有用。對於資源較少的語言對來說,即使在非常具有挑戰性的設定中,範例的有限複雜性實際上也是衡量進度的一件好事。", "### Dataset Curators\n此資料集由Heng-Shiou Sheu 製作。", "### Licensing Information\n這些資料集使用 TED Talks Usage Policy 。有關原始資料集使用條款的詳細資訊列於此處。" ]
a65550bfe7f1949ba55bc1de5018e245a10d90d4
# Dataset Card for Evaluation run of Steelskull/Lumosia-v2-MoE-4x10.7 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [Steelskull/Lumosia-v2-MoE-4x10.7](https://huggingface.co/Steelskull/Lumosia-v2-MoE-4x10.7) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_Steelskull__Lumosia-v2-MoE-4x10.7", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-02-02T08:19:21.300026](https://huggingface.co/datasets/open-llm-leaderboard/details_Steelskull__Lumosia-v2-MoE-4x10.7/blob/main/results_2024-02-02T08-19-21.300026.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6680685275478645, "acc_stderr": 0.0315547578178304, "acc_norm": 0.6687810115447916, "acc_norm_stderr": 0.032201365533529785, "mc1": 0.5324357405140759, "mc1_stderr": 0.017466632149577617, "mc2": 0.6847502236527627, "mc2_stderr": 0.015252351834031837 }, "harness|arc:challenge|25": { "acc": 0.6808873720136519, "acc_stderr": 0.013621696119173307, "acc_norm": 0.7039249146757679, "acc_norm_stderr": 0.013340916085246252 }, "harness|hellaswag|10": { "acc": 0.7024497112129058, "acc_stderr": 0.004562462665505232, "acc_norm": 0.8787094204341764, "acc_norm_stderr": 0.003257974593789941 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.42, "acc_stderr": 0.04960449637488583, "acc_norm": 0.42, "acc_norm_stderr": 0.04960449637488583 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6074074074074074, "acc_stderr": 0.04218506215368879, "acc_norm": 0.6074074074074074, "acc_norm_stderr": 0.04218506215368879 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.743421052631579, "acc_stderr": 0.0355418036802569, "acc_norm": 0.743421052631579, "acc_norm_stderr": 0.0355418036802569 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.74, "acc_stderr": 0.0440844002276808, "acc_norm": 0.74, "acc_norm_stderr": 0.0440844002276808 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6830188679245283, "acc_stderr": 0.02863723563980089, "acc_norm": 0.6830188679245283, "acc_norm_stderr": 0.02863723563980089 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7638888888888888, "acc_stderr": 0.03551446610810826, "acc_norm": 0.7638888888888888, "acc_norm_stderr": 0.03551446610810826 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.44, "acc_stderr": 0.04988876515698589, "acc_norm": 0.44, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.5, "acc_stderr": 0.050251890762960605, "acc_norm": 0.5, "acc_norm_stderr":
0.050251890762960605 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.34, "acc_stderr": 0.04760952285695236, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695236 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6705202312138728, "acc_stderr": 0.03583901754736412, "acc_norm": 0.6705202312138728, "acc_norm_stderr": 0.03583901754736412 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.38235294117647056, "acc_stderr": 0.04835503696107223, "acc_norm": 0.38235294117647056, "acc_norm_stderr": 0.04835503696107223 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.76, "acc_stderr": 0.042923469599092816, "acc_norm": 0.76, "acc_norm_stderr": 0.042923469599092816 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.6170212765957447, "acc_stderr": 0.03177821250236922, "acc_norm": 0.6170212765957447, "acc_norm_stderr": 0.03177821250236922 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5, "acc_stderr": 0.047036043419179864, "acc_norm": 0.5, "acc_norm_stderr": 0.047036043419179864 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.6344827586206897, "acc_stderr": 0.04013124195424386, "acc_norm": 0.6344827586206897, "acc_norm_stderr": 0.04013124195424386 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.47883597883597884, "acc_stderr": 0.025728230952130733, "acc_norm": 0.47883597883597884, "acc_norm_stderr": 0.025728230952130733 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.42857142857142855, "acc_stderr": 0.0442626668137991, "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.0442626668137991 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.36, "acc_stderr": 0.048241815132442176, "acc_norm": 0.36, "acc_norm_stderr": 0.048241815132442176 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.8129032258064516, "acc_stderr": 0.022185710092252255, "acc_norm": 0.8129032258064516, "acc_norm_stderr": 0.022185710092252255 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5172413793103449, "acc_stderr": 0.035158955511656986, "acc_norm": 0.5172413793103449, "acc_norm_stderr": 0.035158955511656986 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.8, "acc_stderr": 0.031234752377721175, "acc_norm": 0.8, "acc_norm_stderr": 0.031234752377721175 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8636363636363636, "acc_stderr": 0.024450155973189835, "acc_norm": 0.8636363636363636, "acc_norm_stderr": 0.024450155973189835 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9015544041450777, "acc_stderr": 0.021500249576033467, "acc_norm": 0.9015544041450777, "acc_norm_stderr": 0.021500249576033467 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6564102564102564, "acc_stderr": 0.02407869658063547, "acc_norm": 0.6564102564102564, "acc_norm_stderr": 0.02407869658063547 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.35555555555555557, "acc_stderr": 0.02918571494985741, "acc_norm": 0.35555555555555557, "acc_norm_stderr": 0.02918571494985741 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.7226890756302521, "acc_stderr": 0.02907937453948001, "acc_norm": 0.7226890756302521, "acc_norm_stderr": 0.02907937453948001 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3708609271523179, "acc_stderr": 0.03943966699183629, "acc_norm": 
0.3708609271523179, "acc_norm_stderr": 0.03943966699183629 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8422018348623853, "acc_stderr": 0.01563002297009246, "acc_norm": 0.8422018348623853, "acc_norm_stderr": 0.01563002297009246 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5648148148148148, "acc_stderr": 0.03381200005643526, "acc_norm": 0.5648148148148148, "acc_norm_stderr": 0.03381200005643526 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8529411764705882, "acc_stderr": 0.02485747808025046, "acc_norm": 0.8529411764705882, "acc_norm_stderr": 0.02485747808025046 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8565400843881856, "acc_stderr": 0.022818291821017012, "acc_norm": 0.8565400843881856, "acc_norm_stderr": 0.022818291821017012 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6860986547085202, "acc_stderr": 0.03114679648297246, "acc_norm": 0.6860986547085202, "acc_norm_stderr": 0.03114679648297246 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7633587786259542, "acc_stderr": 0.03727673575596915, "acc_norm": 0.7633587786259542, "acc_norm_stderr": 0.03727673575596915 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8016528925619835, "acc_stderr": 0.036401182719909456, "acc_norm": 0.8016528925619835, "acc_norm_stderr": 0.036401182719909456 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8148148148148148, "acc_stderr": 0.03755265865037182, "acc_norm": 0.8148148148148148, "acc_norm_stderr": 0.03755265865037182 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7484662576687117, "acc_stderr": 0.03408997886857529, "acc_norm": 0.7484662576687117, "acc_norm_stderr": 0.03408997886857529 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.48214285714285715, "acc_stderr": 0.047427623612430116, "acc_norm": 0.48214285714285715, "acc_norm_stderr": 0.047427623612430116 }, "harness|hendrycksTest-management|5": { "acc": 0.8543689320388349, "acc_stderr": 0.03492606476623791, "acc_norm": 0.8543689320388349, "acc_norm_stderr": 0.03492606476623791 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8632478632478633, "acc_stderr": 0.022509033937077812, "acc_norm": 0.8632478632478633, "acc_norm_stderr": 0.022509033937077812 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.72, "acc_stderr": 0.045126085985421276, "acc_norm": 0.72, "acc_norm_stderr": 0.045126085985421276 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8148148148148148, "acc_stderr": 0.013890862162876168, "acc_norm": 0.8148148148148148, "acc_norm_stderr": 0.013890862162876168 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7630057803468208, "acc_stderr": 0.02289408248992599, "acc_norm": 0.7630057803468208, "acc_norm_stderr": 0.02289408248992599 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4201117318435754, "acc_stderr": 0.016507671073256402, "acc_norm": 0.4201117318435754, "acc_norm_stderr": 0.016507671073256402 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.761437908496732, "acc_stderr": 0.024404394928087866, "acc_norm": 0.761437908496732, "acc_norm_stderr": 0.024404394928087866 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7170418006430869, "acc_stderr": 0.025583062489984824, "acc_norm": 0.7170418006430869, "acc_norm_stderr": 0.025583062489984824 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7777777777777778, "acc_stderr": 0.02313237623454334, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.02313237623454334 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 
0.5070921985815603, "acc_stderr": 0.02982449855912901, "acc_norm": 0.5070921985815603, "acc_norm_stderr": 0.02982449855912901 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.49282920469361147, "acc_stderr": 0.012768922739553308, "acc_norm": 0.49282920469361147, "acc_norm_stderr": 0.012768922739553308 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.7463235294117647, "acc_stderr": 0.026431329870789527, "acc_norm": 0.7463235294117647, "acc_norm_stderr": 0.026431329870789527 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6895424836601307, "acc_stderr": 0.018718067052623216, "acc_norm": 0.6895424836601307, "acc_norm_stderr": 0.018718067052623216 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6909090909090909, "acc_stderr": 0.044262946482000985, "acc_norm": 0.6909090909090909, "acc_norm_stderr": 0.044262946482000985 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7428571428571429, "acc_stderr": 0.027979823538744546, "acc_norm": 0.7428571428571429, "acc_norm_stderr": 0.027979823538744546 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8308457711442786, "acc_stderr": 0.02650859065623327, "acc_norm": 0.8308457711442786, "acc_norm_stderr": 0.02650859065623327 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.92, "acc_stderr": 0.0272659924344291, "acc_norm": 0.92, "acc_norm_stderr": 0.0272659924344291 }, "harness|hendrycksTest-virology|5": { "acc": 0.5903614457831325, "acc_stderr": 0.038284011150790206, "acc_norm": 0.5903614457831325, "acc_norm_stderr": 0.038284011150790206 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7894736842105263, "acc_stderr": 0.03126781714663179, "acc_norm": 0.7894736842105263, "acc_norm_stderr": 0.03126781714663179 }, "harness|truthfulqa:mc|0": { "mc1": 0.5324357405140759, "mc1_stderr": 0.017466632149577617, "mc2": 0.6847502236527627, "mc2_stderr": 0.015252351834031837 }, "harness|winogrande|5": { "acc": 0.8421468034727704, "acc_stderr": 0.010247165248719763 }, "harness|gsm8k|5": { "acc": 0.6512509476876421, "acc_stderr": 0.013127227055035863 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_Steelskull__Lumosia-v2-MoE-4x10.7
[ "region:us" ]
2024-02-02T08:21:43+00:00
{"pretty_name": "Evaluation run of Steelskull/Lumosia-v2-MoE-4x10.7", "dataset_summary": "Dataset automatically created during the evaluation run of model [Steelskull/Lumosia-v2-MoE-4x10.7](https://huggingface.co/Steelskull/Lumosia-v2-MoE-4x10.7) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Steelskull__Lumosia-v2-MoE-4x10.7\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-02T08:19:21.300026](https://huggingface.co/datasets/open-llm-leaderboard/details_Steelskull__Lumosia-v2-MoE-4x10.7/blob/main/results_2024-02-02T08-19-21.300026.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6680685275478645,\n \"acc_stderr\": 0.0315547578178304,\n \"acc_norm\": 0.6687810115447916,\n \"acc_norm_stderr\": 0.032201365533529785,\n \"mc1\": 0.5324357405140759,\n \"mc1_stderr\": 0.017466632149577617,\n \"mc2\": 0.6847502236527627,\n \"mc2_stderr\": 0.015252351834031837\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6808873720136519,\n \"acc_stderr\": 0.013621696119173307,\n \"acc_norm\": 0.7039249146757679,\n \"acc_norm_stderr\": 0.013340916085246252\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.7024497112129058,\n \"acc_stderr\": 0.004562462665505232,\n \"acc_norm\": 0.8787094204341764,\n \"acc_norm_stderr\": 0.003257974593789941\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.42,\n \"acc_stderr\": 0.04960449637488583,\n \"acc_norm\": 0.42,\n \"acc_norm_stderr\": 0.04960449637488583\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6074074074074074,\n \"acc_stderr\": 0.04218506215368879,\n \"acc_norm\": 0.6074074074074074,\n \"acc_norm_stderr\": 0.04218506215368879\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.743421052631579,\n \"acc_stderr\": 0.0355418036802569,\n \"acc_norm\": 0.743421052631579,\n \"acc_norm_stderr\": 0.0355418036802569\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.74,\n \"acc_stderr\": 0.0440844002276808,\n \"acc_norm\": 0.74,\n \"acc_norm_stderr\": 0.0440844002276808\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6830188679245283,\n \"acc_stderr\": 0.02863723563980089,\n \"acc_norm\": 0.6830188679245283,\n \"acc_norm_stderr\": 0.02863723563980089\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7638888888888888,\n \"acc_stderr\": 0.03551446610810826,\n \"acc_norm\": 0.7638888888888888,\n \"acc_norm_stderr\": 0.03551446610810826\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.44,\n 
\"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.44,\n \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.050251890762960605,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.050251890762960605\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695236,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695236\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6705202312138728,\n \"acc_stderr\": 0.03583901754736412,\n \"acc_norm\": 0.6705202312138728,\n \"acc_norm_stderr\": 0.03583901754736412\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.38235294117647056,\n \"acc_stderr\": 0.04835503696107223,\n \"acc_norm\": 0.38235294117647056,\n \"acc_norm_stderr\": 0.04835503696107223\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.76,\n \"acc_stderr\": 0.042923469599092816,\n \"acc_norm\": 0.76,\n \"acc_norm_stderr\": 0.042923469599092816\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.6170212765957447,\n \"acc_stderr\": 0.03177821250236922,\n \"acc_norm\": 0.6170212765957447,\n \"acc_norm_stderr\": 0.03177821250236922\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.047036043419179864,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.047036043419179864\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.6344827586206897,\n \"acc_stderr\": 0.04013124195424386,\n \"acc_norm\": 0.6344827586206897,\n \"acc_norm_stderr\": 0.04013124195424386\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.47883597883597884,\n \"acc_stderr\": 0.025728230952130733,\n \"acc_norm\": 0.47883597883597884,\n \"acc_norm_stderr\": 0.025728230952130733\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.42857142857142855,\n \"acc_stderr\": 0.0442626668137991,\n \"acc_norm\": 0.42857142857142855,\n \"acc_norm_stderr\": 0.0442626668137991\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.36,\n \"acc_stderr\": 0.048241815132442176,\n \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.048241815132442176\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.8129032258064516,\n \"acc_stderr\": 0.022185710092252255,\n \"acc_norm\": 0.8129032258064516,\n \"acc_norm_stderr\": 0.022185710092252255\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5172413793103449,\n \"acc_stderr\": 0.035158955511656986,\n \"acc_norm\": 0.5172413793103449,\n \"acc_norm_stderr\": 0.035158955511656986\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.8,\n \"acc_stderr\": 0.031234752377721175,\n \"acc_norm\": 0.8,\n \"acc_norm_stderr\": 0.031234752377721175\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8636363636363636,\n \"acc_stderr\": 0.024450155973189835,\n \"acc_norm\": 0.8636363636363636,\n \"acc_norm_stderr\": 0.024450155973189835\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9015544041450777,\n \"acc_stderr\": 0.021500249576033467,\n \"acc_norm\": 0.9015544041450777,\n \"acc_norm_stderr\": 0.021500249576033467\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 
0.6564102564102564,\n \"acc_stderr\": 0.02407869658063547,\n \"acc_norm\": 0.6564102564102564,\n \"acc_norm_stderr\": 0.02407869658063547\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.35555555555555557,\n \"acc_stderr\": 0.02918571494985741,\n \"acc_norm\": 0.35555555555555557,\n \"acc_norm_stderr\": 0.02918571494985741\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.7226890756302521,\n \"acc_stderr\": 0.02907937453948001,\n \"acc_norm\": 0.7226890756302521,\n \"acc_norm_stderr\": 0.02907937453948001\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3708609271523179,\n \"acc_stderr\": 0.03943966699183629,\n \"acc_norm\": 0.3708609271523179,\n \"acc_norm_stderr\": 0.03943966699183629\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8422018348623853,\n \"acc_stderr\": 0.01563002297009246,\n \"acc_norm\": 0.8422018348623853,\n \"acc_norm_stderr\": 0.01563002297009246\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5648148148148148,\n \"acc_stderr\": 0.03381200005643526,\n \"acc_norm\": 0.5648148148148148,\n \"acc_norm_stderr\": 0.03381200005643526\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8529411764705882,\n \"acc_stderr\": 0.02485747808025046,\n \"acc_norm\": 0.8529411764705882,\n \"acc_norm_stderr\": 0.02485747808025046\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8565400843881856,\n \"acc_stderr\": 0.022818291821017012,\n \"acc_norm\": 0.8565400843881856,\n \"acc_norm_stderr\": 0.022818291821017012\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6860986547085202,\n \"acc_stderr\": 0.03114679648297246,\n \"acc_norm\": 0.6860986547085202,\n \"acc_norm_stderr\": 0.03114679648297246\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7633587786259542,\n \"acc_stderr\": 0.03727673575596915,\n \"acc_norm\": 0.7633587786259542,\n \"acc_norm_stderr\": 0.03727673575596915\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8016528925619835,\n \"acc_stderr\": 0.036401182719909456,\n \"acc_norm\": 0.8016528925619835,\n \"acc_norm_stderr\": 0.036401182719909456\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8148148148148148,\n \"acc_stderr\": 0.03755265865037182,\n \"acc_norm\": 0.8148148148148148,\n \"acc_norm_stderr\": 0.03755265865037182\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7484662576687117,\n \"acc_stderr\": 0.03408997886857529,\n \"acc_norm\": 0.7484662576687117,\n \"acc_norm_stderr\": 0.03408997886857529\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.48214285714285715,\n \"acc_stderr\": 0.047427623612430116,\n \"acc_norm\": 0.48214285714285715,\n \"acc_norm_stderr\": 0.047427623612430116\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8543689320388349,\n \"acc_stderr\": 0.03492606476623791,\n \"acc_norm\": 0.8543689320388349,\n \"acc_norm_stderr\": 0.03492606476623791\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8632478632478633,\n \"acc_stderr\": 0.022509033937077812,\n \"acc_norm\": 0.8632478632478633,\n \"acc_norm_stderr\": 0.022509033937077812\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.045126085985421276,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.045126085985421276\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8148148148148148,\n \"acc_stderr\": 0.013890862162876168,\n \"acc_norm\": 
0.8148148148148148,\n \"acc_norm_stderr\": 0.013890862162876168\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7630057803468208,\n \"acc_stderr\": 0.02289408248992599,\n \"acc_norm\": 0.7630057803468208,\n \"acc_norm_stderr\": 0.02289408248992599\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4201117318435754,\n \"acc_stderr\": 0.016507671073256402,\n \"acc_norm\": 0.4201117318435754,\n \"acc_norm_stderr\": 0.016507671073256402\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.761437908496732,\n \"acc_stderr\": 0.024404394928087866,\n \"acc_norm\": 0.761437908496732,\n \"acc_norm_stderr\": 0.024404394928087866\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7170418006430869,\n \"acc_stderr\": 0.025583062489984824,\n \"acc_norm\": 0.7170418006430869,\n \"acc_norm_stderr\": 0.025583062489984824\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.02313237623454334,\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.02313237623454334\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.5070921985815603,\n \"acc_stderr\": 0.02982449855912901,\n \"acc_norm\": 0.5070921985815603,\n \"acc_norm_stderr\": 0.02982449855912901\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.49282920469361147,\n \"acc_stderr\": 0.012768922739553308,\n \"acc_norm\": 0.49282920469361147,\n \"acc_norm_stderr\": 0.012768922739553308\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.7463235294117647,\n \"acc_stderr\": 0.026431329870789527,\n \"acc_norm\": 0.7463235294117647,\n \"acc_norm_stderr\": 0.026431329870789527\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6895424836601307,\n \"acc_stderr\": 0.018718067052623216,\n \"acc_norm\": 0.6895424836601307,\n \"acc_norm_stderr\": 0.018718067052623216\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6909090909090909,\n \"acc_stderr\": 0.044262946482000985,\n \"acc_norm\": 0.6909090909090909,\n \"acc_norm_stderr\": 0.044262946482000985\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7428571428571429,\n \"acc_stderr\": 0.027979823538744546,\n \"acc_norm\": 0.7428571428571429,\n \"acc_norm_stderr\": 0.027979823538744546\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8308457711442786,\n \"acc_stderr\": 0.02650859065623327,\n \"acc_norm\": 0.8308457711442786,\n \"acc_norm_stderr\": 0.02650859065623327\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.92,\n \"acc_stderr\": 0.0272659924344291,\n \"acc_norm\": 0.92,\n \"acc_norm_stderr\": 0.0272659924344291\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5903614457831325,\n \"acc_stderr\": 0.038284011150790206,\n \"acc_norm\": 0.5903614457831325,\n \"acc_norm_stderr\": 0.038284011150790206\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.7894736842105263,\n \"acc_stderr\": 0.03126781714663179,\n \"acc_norm\": 0.7894736842105263,\n \"acc_norm_stderr\": 0.03126781714663179\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5324357405140759,\n \"mc1_stderr\": 0.017466632149577617,\n \"mc2\": 0.6847502236527627,\n \"mc2_stderr\": 0.015252351834031837\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8421468034727704,\n \"acc_stderr\": 0.010247165248719763\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.6512509476876421,\n \"acc_stderr\": 0.013127227055035863\n }\n}\n```", "repo_url": 
"https://huggingface.co/Steelskull/Lumosia-v2-MoE-4x10.7", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_02T08_19_21.300026", "path": ["**/details_harness|arc:challenge|25_2024-02-02T08-19-21.300026.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-02T08-19-21.300026.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_02T08_19_21.300026", "path": ["**/details_harness|gsm8k|5_2024-02-02T08-19-21.300026.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-02T08-19-21.300026.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_02T08_19_21.300026", "path": ["**/details_harness|hellaswag|10_2024-02-02T08-19-21.300026.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-02T08-19-21.300026.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_02T08_19_21.300026", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T08-19-21.300026.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T08-19-21.300026.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T08-19-21.300026.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T08-19-21.300026.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T08-19-21.300026.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T08-19-21.300026.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_02T08_19_21.300026", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T08-19-21.300026.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T08-19-21.300026.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_02T08_19_21.300026", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T08-19-21.300026.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T08-19-21.300026.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_02T08_19_21.300026", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T08-19-21.300026.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T08-19-21.300026.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_02T08_19_21.300026", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T08-19-21.300026.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T08-19-21.300026.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_02T08_19_21.300026", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T08-19-21.300026.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T08-19-21.300026.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_02T08_19_21.300026", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T08-19-21.300026.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T08-19-21.300026.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_02T08_19_21.300026", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T08-19-21.300026.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T08-19-21.300026.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_02_02T08_19_21.300026", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T08-19-21.300026.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T08-19-21.300026.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_02T08_19_21.300026", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T08-19-21.300026.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T08-19-21.300026.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_02T08_19_21.300026", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T08-19-21.300026.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T08-19-21.300026.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_02T08_19_21.300026", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T08-19-21.300026.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T08-19-21.300026.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_02T08_19_21.300026", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T08-19-21.300026.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T08-19-21.300026.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_02T08_19_21.300026", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T08-19-21.300026.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T08-19-21.300026.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_02T08_19_21.300026", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T08-19-21.300026.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T08-19-21.300026.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_02T08_19_21.300026", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T08-19-21.300026.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T08-19-21.300026.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_02T08_19_21.300026", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T08-19-21.300026.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T08-19-21.300026.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_02T08_19_21.300026", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T08-19-21.300026.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T08-19-21.300026.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_02T08_19_21.300026", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T08-19-21.300026.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T08-19-21.300026.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_02T08_19_21.300026", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T08-19-21.300026.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T08-19-21.300026.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_02T08_19_21.300026", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T08-19-21.300026.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T08-19-21.300026.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_02T08_19_21.300026", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T08-19-21.300026.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T08-19-21.300026.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_02T08_19_21.300026", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T08-19-21.300026.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T08-19-21.300026.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_02T08_19_21.300026", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T08-19-21.300026.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T08-19-21.300026.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_02T08_19_21.300026", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T08-19-21.300026.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T08-19-21.300026.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_02T08_19_21.300026", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T08-19-21.300026.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T08-19-21.300026.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_02T08_19_21.300026", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T08-19-21.300026.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T08-19-21.300026.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_02T08_19_21.300026", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T08-19-21.300026.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T08-19-21.300026.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_02T08_19_21.300026", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T08-19-21.300026.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T08-19-21.300026.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_02T08_19_21.300026", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T08-19-21.300026.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T08-19-21.300026.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_02T08_19_21.300026", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T08-19-21.300026.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T08-19-21.300026.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_02T08_19_21.300026", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T08-19-21.300026.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T08-19-21.300026.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_02T08_19_21.300026", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T08-19-21.300026.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T08-19-21.300026.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_02T08_19_21.300026", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T08-19-21.300026.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T08-19-21.300026.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_02T08_19_21.300026", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T08-19-21.300026.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T08-19-21.300026.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_02T08_19_21.300026", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T08-19-21.300026.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T08-19-21.300026.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_02T08_19_21.300026", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T08-19-21.300026.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T08-19-21.300026.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_02T08_19_21.300026", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T08-19-21.300026.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T08-19-21.300026.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_02_02T08_19_21.300026", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T08-19-21.300026.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T08-19-21.300026.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_02T08_19_21.300026", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T08-19-21.300026.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T08-19-21.300026.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_02T08_19_21.300026", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T08-19-21.300026.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T08-19-21.300026.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_02T08_19_21.300026", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T08-19-21.300026.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T08-19-21.300026.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_02T08_19_21.300026", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T08-19-21.300026.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T08-19-21.300026.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_02T08_19_21.300026", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T08-19-21.300026.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T08-19-21.300026.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_02T08_19_21.300026", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T08-19-21.300026.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T08-19-21.300026.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_02T08_19_21.300026", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T08-19-21.300026.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T08-19-21.300026.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_02T08_19_21.300026", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T08-19-21.300026.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T08-19-21.300026.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_02T08_19_21.300026", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T08-19-21.300026.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T08-19-21.300026.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_02T08_19_21.300026", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T08-19-21.300026.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T08-19-21.300026.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_02T08_19_21.300026", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T08-19-21.300026.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T08-19-21.300026.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_02T08_19_21.300026", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T08-19-21.300026.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T08-19-21.300026.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_02T08_19_21.300026", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T08-19-21.300026.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T08-19-21.300026.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_02T08_19_21.300026", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T08-19-21.300026.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T08-19-21.300026.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_02T08_19_21.300026", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T08-19-21.300026.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T08-19-21.300026.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_02T08_19_21.300026", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T08-19-21.300026.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T08-19-21.300026.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_02T08_19_21.300026", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T08-19-21.300026.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T08-19-21.300026.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_02T08_19_21.300026", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T08-19-21.300026.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T08-19-21.300026.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_02T08_19_21.300026", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T08-19-21.300026.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T08-19-21.300026.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_02T08_19_21.300026", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T08-19-21.300026.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T08-19-21.300026.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_02T08_19_21.300026", "path": ["**/details_harness|winogrande|5_2024-02-02T08-19-21.300026.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-02-02T08-19-21.300026.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_02_02T08_19_21.300026", "path": ["results_2024-02-02T08-19-21.300026.parquet"]}, {"split": "latest", "path": ["results_2024-02-02T08-19-21.300026.parquet"]}]}]}
2024-02-02T08:22:06+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of Steelskull/Lumosia-v2-MoE-4x10.7 Dataset automatically created during the evaluation run of model Steelskull/Lumosia-v2-MoE-4x10.7 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-02-02T08:19:21.300026 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of Steelskull/Lumosia-v2-MoE-4x10.7\n\n\n\nDataset automatically created during the evaluation run of model Steelskull/Lumosia-v2-MoE-4x10.7 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T08:19:21.300026(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of Steelskull/Lumosia-v2-MoE-4x10.7\n\n\n\nDataset automatically created during the evaluation run of model Steelskull/Lumosia-v2-MoE-4x10.7 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T08:19:21.300026(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
e8bd9ea6af434d927a190d69ae77be41ce42be10
# Dataset Card for Evaluation run of rizla/rizla54

<!-- Provide a quick summary of the dataset. -->

Dataset automatically created during the evaluation run of model [rizla/rizla54](https://huggingface.co/rizla/rizla54) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). A short sketch of loading that configuration directly appears at the end of this card.

To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_rizla__rizla54",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2024-02-02T08:26:50.989261](https://huggingface.co/datasets/open-llm-leaderboard/details_rizla__rizla54/blob/main/results_2024-02-02T08-26-50.989261.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "acc": 0.6070754715230492,
        "acc_stderr": 0.033251216013816025,
        "acc_norm": 0.6153378461064626,
        "acc_norm_stderr": 0.03397161363942805,
        "mc1": 0.37209302325581395,
        "mc1_stderr": 0.016921090118814038,
        "mc2": 0.5325609210892159,
        "mc2_stderr": 0.015366351468634187
    },
    "harness|arc:challenge|25": {
        "acc": 0.5392491467576792,
        "acc_stderr": 0.014566303676636586,
        "acc_norm": 0.5819112627986348,
        "acc_norm_stderr": 0.014413988396996081
    },
    "harness|hellaswag|10": {
        "acc": 0.5825532762397929,
        "acc_stderr": 0.004921300331285573,
        "acc_norm": 0.7873929496116312,
        "acc_norm_stderr": 0.004083157276012493
    },
    "harness|hendrycksTest-abstract_algebra|5": {
        "acc": 0.3,
        "acc_stderr": 0.04605661864718381,
        "acc_norm": 0.3,
        "acc_norm_stderr": 0.04605661864718381
    },
    "harness|hendrycksTest-anatomy|5": {
        "acc": 0.6222222222222222,
        "acc_stderr": 0.04188307537595852,
        "acc_norm": 0.6222222222222222,
        "acc_norm_stderr": 0.04188307537595852
    },
    "harness|hendrycksTest-astronomy|5": {
        "acc": 0.743421052631579,
        "acc_stderr": 0.0355418036802569,
        "acc_norm": 0.743421052631579,
        "acc_norm_stderr": 0.0355418036802569
    },
    "harness|hendrycksTest-business_ethics|5": {
        "acc": 0.67,
        "acc_stderr": 0.04725815626252607,
        "acc_norm": 0.67,
        "acc_norm_stderr": 0.04725815626252607
    },
    "harness|hendrycksTest-clinical_knowledge|5": {
        "acc": 0.6226415094339622,
        "acc_stderr": 0.029832808114796005,
        "acc_norm": 0.6226415094339622,
        "acc_norm_stderr": 0.029832808114796005
    },
    "harness|hendrycksTest-college_biology|5": {
        "acc": 0.6527777777777778,
        "acc_stderr": 0.039812405437178615,
        "acc_norm": 0.6527777777777778,
        "acc_norm_stderr": 0.039812405437178615
    },
    "harness|hendrycksTest-college_chemistry|5": {
        "acc": 0.49,
        "acc_stderr": 0.05024183937956912,
        "acc_norm": 0.49,
        "acc_norm_stderr": 0.05024183937956912
    },
    "harness|hendrycksTest-college_computer_science|5": {
        "acc": 0.5,
        "acc_stderr": 0.050251890762960605,
        "acc_norm": 0.5,
        "acc_norm_stderr": 0.050251890762960605
    },
    "harness|hendrycksTest-college_mathematics|5": {
        "acc": 0.32,
        "acc_stderr": 0.046882617226215034,
        "acc_norm": 0.32,
        "acc_norm_stderr": 0.046882617226215034
    },
    "harness|hendrycksTest-college_medicine|5": {
        "acc": 0.5606936416184971,
        "acc_stderr": 0.03784271932887467,
        "acc_norm": 0.5606936416184971,
        "acc_norm_stderr": 0.03784271932887467
    },
    "harness|hendrycksTest-college_physics|5": {
        "acc": 0.37254901960784315,
        "acc_stderr": 0.04810840148082635,
        "acc_norm": 0.37254901960784315,
        "acc_norm_stderr": 0.04810840148082635
    },
    "harness|hendrycksTest-computer_security|5": {
        "acc": 0.68,
        "acc_stderr": 0.046882617226215034,
        "acc_norm": 0.68,
        "acc_norm_stderr": 0.046882617226215034
    },
    "harness|hendrycksTest-conceptual_physics|5": {
        "acc": 0.5617021276595745,
        "acc_stderr": 0.032436186361081004,
        "acc_norm": 0.5617021276595745,
        "acc_norm_stderr": 0.032436186361081004
    },
    "harness|hendrycksTest-econometrics|5": {
        "acc": 0.4473684210526316,
        "acc_stderr": 0.046774730044911984,
        "acc_norm": 0.4473684210526316,
        "acc_norm_stderr": 0.046774730044911984
    },
    "harness|hendrycksTest-electrical_engineering|5": {
        "acc": 0.503448275862069,
        "acc_stderr": 0.0416656757710158,
        "acc_norm": 0.503448275862069,
        "acc_norm_stderr": 0.0416656757710158
    },
    "harness|hendrycksTest-elementary_mathematics|5": {
        "acc": 0.4708994708994709,
        "acc_stderr": 0.025707658614154957,
        "acc_norm": 0.4708994708994709,
        "acc_norm_stderr": 0.025707658614154957
    },
    "harness|hendrycksTest-formal_logic|5": {
        "acc": 0.5079365079365079,
        "acc_stderr": 0.044715725362943486,
        "acc_norm": 0.5079365079365079,
        "acc_norm_stderr": 0.044715725362943486
    },
    "harness|hendrycksTest-global_facts|5": {
        "acc": 0.39,
        "acc_stderr": 0.04902071300001974,
        "acc_norm": 0.39,
        "acc_norm_stderr": 0.04902071300001974
    },
    "harness|hendrycksTest-high_school_biology|5": {
        "acc": 0.7322580645161291,
        "acc_stderr": 0.02518900666021238,
        "acc_norm": 0.7322580645161291,
        "acc_norm_stderr": 0.02518900666021238
    },
    "harness|hendrycksTest-high_school_chemistry|5": {
        "acc": 0.42857142857142855,
        "acc_stderr": 0.034819048444388045,
        "acc_norm": 0.42857142857142855,
        "acc_norm_stderr": 0.034819048444388045
    },
    "harness|hendrycksTest-high_school_computer_science|5": {
        "acc": 0.63,
        "acc_stderr": 0.04852365870939099,
        "acc_norm": 0.63,
        "acc_norm_stderr": 0.04852365870939099
    },
    "harness|hendrycksTest-high_school_european_history|5": {
        "acc": 0.7454545454545455,
        "acc_stderr": 0.0340150671524904,
        "acc_norm": 0.7454545454545455,
        "acc_norm_stderr": 0.0340150671524904
    },
    "harness|hendrycksTest-high_school_geography|5": {
        "acc": 0.7727272727272727,
        "acc_stderr": 0.029857515673386417,
        "acc_norm": 0.7727272727272727,
        "acc_norm_stderr": 0.029857515673386417
    },
    "harness|hendrycksTest-high_school_government_and_politics|5": {
        "acc": 0.8238341968911918,
        "acc_stderr": 0.027493504244548057,
        "acc_norm": 0.8238341968911918,
        "acc_norm_stderr": 0.027493504244548057
    },
    "harness|hendrycksTest-high_school_macroeconomics|5": {
        "acc": 0.6153846153846154,
        "acc_stderr": 0.024666744915187222,
        "acc_norm": 0.6153846153846154,
        "acc_norm_stderr": 0.024666744915187222
    },
    "harness|hendrycksTest-high_school_mathematics|5": {
        "acc": 0.37407407407407406,
        "acc_stderr": 0.02950286112895529,
        "acc_norm": 0.37407407407407406,
        "acc_norm_stderr": 0.02950286112895529
    },
    "harness|hendrycksTest-high_school_microeconomics|5": {
        "acc": 0.7016806722689075,
        "acc_stderr": 0.029719142876342867,
        "acc_norm": 0.7016806722689075,
        "acc_norm_stderr": 0.029719142876342867
    },
    "harness|hendrycksTest-high_school_physics|5": {
        "acc": 0.37748344370860926,
        "acc_stderr": 0.0395802723112157,
        "acc_norm": 0.37748344370860926,
        "acc_norm_stderr": 0.0395802723112157
    },
    "harness|hendrycksTest-high_school_psychology|5": {
        "acc": 0.8311926605504587,
        "acc_stderr": 0.016060056268530364,
        "acc_norm": 0.8311926605504587,
        "acc_norm_stderr": 0.016060056268530364
    },
    "harness|hendrycksTest-high_school_statistics|5": {
        "acc": 0.49537037037037035,
        "acc_stderr": 0.03409825519163572,
        "acc_norm": 0.49537037037037035,
        "acc_norm_stderr": 0.03409825519163572
    },
    "harness|hendrycksTest-high_school_us_history|5": {
        "acc": 0.7647058823529411,
        "acc_stderr": 0.029771775228145628,
        "acc_norm": 0.7647058823529411,
        "acc_norm_stderr": 0.029771775228145628
    },
    "harness|hendrycksTest-high_school_world_history|5": {
        "acc": 0.7974683544303798,
        "acc_stderr": 0.026160568246601443,
        "acc_norm": 0.7974683544303798,
        "acc_norm_stderr": 0.026160568246601443
    },
    "harness|hendrycksTest-human_aging|5": {
        "acc": 0.6278026905829597,
        "acc_stderr": 0.03244305283008731,
        "acc_norm": 0.6278026905829597,
        "acc_norm_stderr": 0.03244305283008731
    },
    "harness|hendrycksTest-human_sexuality|5": {
        "acc": 0.6564885496183206,
        "acc_stderr": 0.041649760719448786,
        "acc_norm": 0.6564885496183206,
        "acc_norm_stderr": 0.041649760719448786
    },
    "harness|hendrycksTest-international_law|5": {
        "acc": 0.7851239669421488,
        "acc_stderr": 0.037494924487096966,
        "acc_norm": 0.7851239669421488,
        "acc_norm_stderr": 0.037494924487096966
    },
    "harness|hendrycksTest-jurisprudence|5": {
        "acc": 0.7037037037037037,
        "acc_stderr": 0.044143436668549335,
        "acc_norm": 0.7037037037037037,
        "acc_norm_stderr": 0.044143436668549335
    },
    "harness|hendrycksTest-logical_fallacies|5": {
        "acc": 0.6748466257668712,
        "acc_stderr": 0.03680350371286461,
        "acc_norm": 0.6748466257668712,
        "acc_norm_stderr": 0.03680350371286461
    },
    "harness|hendrycksTest-machine_learning|5": {
        "acc": 0.4732142857142857,
        "acc_stderr": 0.04738975119274155,
        "acc_norm": 0.4732142857142857,
        "acc_norm_stderr": 0.04738975119274155
    },
    "harness|hendrycksTest-management|5": {
        "acc": 0.7475728155339806,
        "acc_stderr": 0.04301250399690878,
        "acc_norm": 0.7475728155339806,
        "acc_norm_stderr": 0.04301250399690878
    },
    "harness|hendrycksTest-marketing|5": {
        "acc": 0.782051282051282,
        "acc_stderr": 0.02704685763071668,
        "acc_norm": 0.782051282051282,
        "acc_norm_stderr": 0.02704685763071668
    },
    "harness|hendrycksTest-medical_genetics|5": {
        "acc": 0.63,
        "acc_stderr": 0.048523658709391,
        "acc_norm": 0.63,
        "acc_norm_stderr": 0.048523658709391
    },
    "harness|hendrycksTest-miscellaneous|5": {
        "acc": 0.7803320561941252,
        "acc_stderr": 0.01480538447837116,
        "acc_norm": 0.7803320561941252,
        "acc_norm_stderr": 0.01480538447837116
    },
    "harness|hendrycksTest-moral_disputes|5": {
        "acc": 0.661849710982659,
        "acc_stderr": 0.02546977014940017,
        "acc_norm": 0.661849710982659,
        "acc_norm_stderr": 0.02546977014940017
    },
    "harness|hendrycksTest-moral_scenarios|5": {
        "acc": 0.3396648044692737,
        "acc_stderr": 0.0158394004062125,
        "acc_norm": 0.3396648044692737,
        "acc_norm_stderr": 0.0158394004062125
    },
    "harness|hendrycksTest-nutrition|5": {
        "acc": 0.6601307189542484,
        "acc_stderr": 0.027121956071388863,
        "acc_norm": 0.6601307189542484,
        "acc_norm_stderr": 0.027121956071388863
    },
    "harness|hendrycksTest-philosophy|5": {
        "acc": 0.6977491961414791,
        "acc_stderr": 0.02608270069539966,
        "acc_norm": 0.6977491961414791,
        "acc_norm_stderr": 0.02608270069539966
    },
    "harness|hendrycksTest-prehistory|5": {
        "acc": 0.691358024691358,
        "acc_stderr": 0.025702640260603746,
        "acc_norm": 0.691358024691358,
        "acc_norm_stderr": 0.025702640260603746
    },
    "harness|hendrycksTest-professional_accounting|5": {
        "acc": 0.4858156028368794,
        "acc_stderr": 0.02981549448368206,
        "acc_norm": 0.4858156028368794,
        "acc_norm_stderr": 0.02981549448368206
    },
    "harness|hendrycksTest-professional_law|5": {
        "acc": 0.4765319426336376,
        "acc_stderr": 0.012756161942523372,
        "acc_norm": 0.4765319426336376,
        "acc_norm_stderr": 0.012756161942523372
    },
    "harness|hendrycksTest-professional_medicine|5": {
        "acc": 0.6617647058823529,
        "acc_stderr": 0.02873932851398358,
        "acc_norm": 0.6617647058823529,
        "acc_norm_stderr": 0.02873932851398358
    },
    "harness|hendrycksTest-professional_psychology|5": {
        "acc": 0.6683006535947712,
        "acc_stderr": 0.01904748523936038,
        "acc_norm": 0.6683006535947712,
        "acc_norm_stderr": 0.01904748523936038
    },
    "harness|hendrycksTest-public_relations|5": {
        "acc": 0.6636363636363637,
        "acc_stderr": 0.04525393596302506,
        "acc_norm": 0.6636363636363637,
        "acc_norm_stderr": 0.04525393596302506
    },
    "harness|hendrycksTest-security_studies|5": {
        "acc": 0.6775510204081633,
        "acc_stderr": 0.029923100563683913,
        "acc_norm": 0.6775510204081633,
        "acc_norm_stderr": 0.029923100563683913
    },
    "harness|hendrycksTest-sociology|5": {
        "acc": 0.8109452736318408,
        "acc_stderr": 0.027686913588013007,
        "acc_norm": 0.8109452736318408,
        "acc_norm_stderr": 0.027686913588013007
    },
    "harness|hendrycksTest-us_foreign_policy|5": {
        "acc": 0.82,
        "acc_stderr": 0.038612291966536955,
        "acc_norm": 0.82,
        "acc_norm_stderr": 0.038612291966536955
    },
    "harness|hendrycksTest-virology|5": {
        "acc": 0.463855421686747,
        "acc_stderr": 0.03882310850890593,
        "acc_norm": 0.463855421686747,
        "acc_norm_stderr": 0.03882310850890593
    },
    "harness|hendrycksTest-world_religions|5": {
        "acc": 0.7894736842105263,
        "acc_stderr": 0.03126781714663179,
        "acc_norm": 0.7894736842105263,
        "acc_norm_stderr": 0.03126781714663179
    },
    "harness|truthfulqa:mc|0": {
        "mc1": 0.37209302325581395,
        "mc1_stderr": 0.016921090118814038,
        "mc2": 0.5325609210892159,
        "mc2_stderr": 0.015366351468634187
    },
    "harness|winogrande|5": {
        "acc": 0.7679558011049724,
        "acc_stderr": 0.011864149691827931
    },
    "harness|gsm8k|5": {
        "acc": 0.20621683093252463,
        "acc_stderr": 0.011144364089781436
    }
}
```

## Dataset Details

### Dataset Description

<!-- Provide a longer summary of what this dataset is. -->

- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]

### Dataset Sources [optional]

<!-- Provide the basic links for the dataset. -->

- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]

## Uses

<!-- Address questions around how the dataset is intended to be used. -->

### Direct Use

<!-- This section describes suitable use cases for the dataset. -->

[More Information Needed]

### Out-of-Scope Use

<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->

[More Information Needed]

## Dataset Structure

<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->

[More Information Needed]

## Dataset Creation

### Curation Rationale

<!-- Motivation for the creation of this dataset. -->

[More Information Needed]

### Source Data

<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->

#### Data Collection and Processing

<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->

[More Information Needed]

#### Who are the source data producers?

<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->

[More Information Needed]

### Annotations [optional]

<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->

#### Annotation process

<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->

[More Information Needed]

#### Who are the annotators?

<!-- This section describes the people or systems who created the annotations. -->

[More Information Needed]

#### Personal and Sensitive Information

<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->

[More Information Needed]

## Bias, Risks, and Limitations

<!-- This section is meant to convey both technical and sociotechnical limitations. -->

[More Information Needed]

### Recommendations

<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->

Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.

## Citation [optional]

<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->

**BibTeX:**

[More Information Needed]

**APA:**

[More Information Needed]

## Glossary [optional]

<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->

[More Information Needed]

## More Information [optional]

[More Information Needed]

## Dataset Card Authors [optional]

[More Information Needed]

## Dataset Card Contact

[More Information Needed]
open-llm-leaderboard/details_rizla__rizla54
[ "region:us" ]
2024-02-02T08:29:14+00:00
{"pretty_name": "Evaluation run of rizla/rizla54", "dataset_summary": "Dataset automatically created during the evaluation run of model [rizla/rizla54](https://huggingface.co/rizla/rizla54) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_rizla__rizla54\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-02T08:26:50.989261](https://huggingface.co/datasets/open-llm-leaderboard/details_rizla__rizla54/blob/main/results_2024-02-02T08-26-50.989261.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6070754715230492,\n \"acc_stderr\": 0.033251216013816025,\n \"acc_norm\": 0.6153378461064626,\n \"acc_norm_stderr\": 0.03397161363942805,\n \"mc1\": 0.37209302325581395,\n \"mc1_stderr\": 0.016921090118814038,\n \"mc2\": 0.5325609210892159,\n \"mc2_stderr\": 0.015366351468634187\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5392491467576792,\n \"acc_stderr\": 0.014566303676636586,\n \"acc_norm\": 0.5819112627986348,\n \"acc_norm_stderr\": 0.014413988396996081\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.5825532762397929,\n \"acc_stderr\": 0.004921300331285573,\n \"acc_norm\": 0.7873929496116312,\n \"acc_norm_stderr\": 0.004083157276012493\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.04605661864718381,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.04605661864718381\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6222222222222222,\n \"acc_stderr\": 0.04188307537595852,\n \"acc_norm\": 0.6222222222222222,\n \"acc_norm_stderr\": 0.04188307537595852\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.743421052631579,\n \"acc_stderr\": 0.0355418036802569,\n \"acc_norm\": 0.743421052631579,\n \"acc_norm_stderr\": 0.0355418036802569\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.67,\n \"acc_stderr\": 0.04725815626252607,\n \"acc_norm\": 0.67,\n \"acc_norm_stderr\": 0.04725815626252607\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6226415094339622,\n \"acc_stderr\": 0.029832808114796005,\n \"acc_norm\": 0.6226415094339622,\n \"acc_norm_stderr\": 0.029832808114796005\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.6527777777777778,\n \"acc_stderr\": 0.039812405437178615,\n \"acc_norm\": 0.6527777777777778,\n \"acc_norm_stderr\": 0.039812405437178615\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.49,\n \"acc_stderr\": 0.05024183937956912,\n \"acc_norm\": 0.49,\n \"acc_norm_stderr\": 
0.05024183937956912\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.050251890762960605,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.050251890762960605\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.046882617226215034,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.046882617226215034\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5606936416184971,\n \"acc_stderr\": 0.03784271932887467,\n \"acc_norm\": 0.5606936416184971,\n \"acc_norm_stderr\": 0.03784271932887467\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.37254901960784315,\n \"acc_stderr\": 0.04810840148082635,\n \"acc_norm\": 0.37254901960784315,\n \"acc_norm_stderr\": 0.04810840148082635\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.68,\n \"acc_stderr\": 0.046882617226215034,\n \"acc_norm\": 0.68,\n \"acc_norm_stderr\": 0.046882617226215034\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5617021276595745,\n \"acc_stderr\": 0.032436186361081004,\n \"acc_norm\": 0.5617021276595745,\n \"acc_norm_stderr\": 0.032436186361081004\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.4473684210526316,\n \"acc_stderr\": 0.046774730044911984,\n \"acc_norm\": 0.4473684210526316,\n \"acc_norm_stderr\": 0.046774730044911984\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.503448275862069,\n \"acc_stderr\": 0.0416656757710158,\n \"acc_norm\": 0.503448275862069,\n \"acc_norm_stderr\": 0.0416656757710158\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.4708994708994709,\n \"acc_stderr\": 0.025707658614154957,\n \"acc_norm\": 0.4708994708994709,\n \"acc_norm_stderr\": 0.025707658614154957\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.5079365079365079,\n \"acc_stderr\": 0.044715725362943486,\n \"acc_norm\": 0.5079365079365079,\n \"acc_norm_stderr\": 0.044715725362943486\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.39,\n \"acc_stderr\": 0.04902071300001974,\n \"acc_norm\": 0.39,\n \"acc_norm_stderr\": 0.04902071300001974\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7322580645161291,\n \"acc_stderr\": 0.02518900666021238,\n \"acc_norm\": 0.7322580645161291,\n \"acc_norm_stderr\": 0.02518900666021238\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.42857142857142855,\n \"acc_stderr\": 0.034819048444388045,\n \"acc_norm\": 0.42857142857142855,\n \"acc_norm_stderr\": 0.034819048444388045\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.63,\n \"acc_stderr\": 0.04852365870939099,\n \"acc_norm\": 0.63,\n \"acc_norm_stderr\": 0.04852365870939099\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7454545454545455,\n \"acc_stderr\": 0.0340150671524904,\n \"acc_norm\": 0.7454545454545455,\n \"acc_norm_stderr\": 0.0340150671524904\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7727272727272727,\n \"acc_stderr\": 0.029857515673386417,\n \"acc_norm\": 0.7727272727272727,\n \"acc_norm_stderr\": 0.029857515673386417\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8238341968911918,\n \"acc_stderr\": 0.027493504244548057,\n \"acc_norm\": 0.8238341968911918,\n \"acc_norm_stderr\": 0.027493504244548057\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6153846153846154,\n \"acc_stderr\": 
0.024666744915187222,\n \"acc_norm\": 0.6153846153846154,\n \"acc_norm_stderr\": 0.024666744915187222\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.37407407407407406,\n \"acc_stderr\": 0.02950286112895529,\n \"acc_norm\": 0.37407407407407406,\n \"acc_norm_stderr\": 0.02950286112895529\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.7016806722689075,\n \"acc_stderr\": 0.029719142876342867,\n \"acc_norm\": 0.7016806722689075,\n \"acc_norm_stderr\": 0.029719142876342867\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.37748344370860926,\n \"acc_stderr\": 0.0395802723112157,\n \"acc_norm\": 0.37748344370860926,\n \"acc_norm_stderr\": 0.0395802723112157\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8311926605504587,\n \"acc_stderr\": 0.016060056268530364,\n \"acc_norm\": 0.8311926605504587,\n \"acc_norm_stderr\": 0.016060056268530364\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.49537037037037035,\n \"acc_stderr\": 0.03409825519163572,\n \"acc_norm\": 0.49537037037037035,\n \"acc_norm_stderr\": 0.03409825519163572\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7647058823529411,\n \"acc_stderr\": 0.029771775228145628,\n \"acc_norm\": 0.7647058823529411,\n \"acc_norm_stderr\": 0.029771775228145628\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7974683544303798,\n \"acc_stderr\": 0.026160568246601443,\n \"acc_norm\": 0.7974683544303798,\n \"acc_norm_stderr\": 0.026160568246601443\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6278026905829597,\n \"acc_stderr\": 0.03244305283008731,\n \"acc_norm\": 0.6278026905829597,\n \"acc_norm_stderr\": 0.03244305283008731\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.6564885496183206,\n \"acc_stderr\": 0.041649760719448786,\n \"acc_norm\": 0.6564885496183206,\n \"acc_norm_stderr\": 0.041649760719448786\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7851239669421488,\n \"acc_stderr\": 0.037494924487096966,\n \"acc_norm\": 0.7851239669421488,\n \"acc_norm_stderr\": 0.037494924487096966\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7037037037037037,\n \"acc_stderr\": 0.044143436668549335,\n \"acc_norm\": 0.7037037037037037,\n \"acc_norm_stderr\": 0.044143436668549335\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.6748466257668712,\n \"acc_stderr\": 0.03680350371286461,\n \"acc_norm\": 0.6748466257668712,\n \"acc_norm_stderr\": 0.03680350371286461\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4732142857142857,\n \"acc_stderr\": 0.04738975119274155,\n \"acc_norm\": 0.4732142857142857,\n \"acc_norm_stderr\": 0.04738975119274155\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7475728155339806,\n \"acc_stderr\": 0.04301250399690878,\n \"acc_norm\": 0.7475728155339806,\n \"acc_norm_stderr\": 0.04301250399690878\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.782051282051282,\n \"acc_stderr\": 0.02704685763071668,\n \"acc_norm\": 0.782051282051282,\n \"acc_norm_stderr\": 0.02704685763071668\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.63,\n \"acc_stderr\": 0.048523658709391,\n \"acc_norm\": 0.63,\n \"acc_norm_stderr\": 0.048523658709391\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7803320561941252,\n \"acc_stderr\": 0.01480538447837116,\n \"acc_norm\": 0.7803320561941252,\n 
\"acc_norm_stderr\": 0.01480538447837116\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.661849710982659,\n \"acc_stderr\": 0.02546977014940017,\n \"acc_norm\": 0.661849710982659,\n \"acc_norm_stderr\": 0.02546977014940017\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.3396648044692737,\n \"acc_stderr\": 0.0158394004062125,\n \"acc_norm\": 0.3396648044692737,\n \"acc_norm_stderr\": 0.0158394004062125\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.6601307189542484,\n \"acc_stderr\": 0.027121956071388863,\n \"acc_norm\": 0.6601307189542484,\n \"acc_norm_stderr\": 0.027121956071388863\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6977491961414791,\n \"acc_stderr\": 0.02608270069539966,\n \"acc_norm\": 0.6977491961414791,\n \"acc_norm_stderr\": 0.02608270069539966\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.691358024691358,\n \"acc_stderr\": 0.025702640260603746,\n \"acc_norm\": 0.691358024691358,\n \"acc_norm_stderr\": 0.025702640260603746\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4858156028368794,\n \"acc_stderr\": 0.02981549448368206,\n \"acc_norm\": 0.4858156028368794,\n \"acc_norm_stderr\": 0.02981549448368206\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4765319426336376,\n \"acc_stderr\": 0.012756161942523372,\n \"acc_norm\": 0.4765319426336376,\n \"acc_norm_stderr\": 0.012756161942523372\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6617647058823529,\n \"acc_stderr\": 0.02873932851398358,\n \"acc_norm\": 0.6617647058823529,\n \"acc_norm_stderr\": 0.02873932851398358\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6683006535947712,\n \"acc_stderr\": 0.01904748523936038,\n \"acc_norm\": 0.6683006535947712,\n \"acc_norm_stderr\": 0.01904748523936038\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6636363636363637,\n \"acc_stderr\": 0.04525393596302506,\n \"acc_norm\": 0.6636363636363637,\n \"acc_norm_stderr\": 0.04525393596302506\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.6775510204081633,\n \"acc_stderr\": 0.029923100563683913,\n \"acc_norm\": 0.6775510204081633,\n \"acc_norm_stderr\": 0.029923100563683913\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8109452736318408,\n \"acc_stderr\": 0.027686913588013007,\n \"acc_norm\": 0.8109452736318408,\n \"acc_norm_stderr\": 0.027686913588013007\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.82,\n \"acc_stderr\": 0.038612291966536955,\n \"acc_norm\": 0.82,\n \"acc_norm_stderr\": 0.038612291966536955\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.463855421686747,\n \"acc_stderr\": 0.03882310850890593,\n \"acc_norm\": 0.463855421686747,\n \"acc_norm_stderr\": 0.03882310850890593\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.7894736842105263,\n \"acc_stderr\": 0.03126781714663179,\n \"acc_norm\": 0.7894736842105263,\n \"acc_norm_stderr\": 0.03126781714663179\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.37209302325581395,\n \"mc1_stderr\": 0.016921090118814038,\n \"mc2\": 0.5325609210892159,\n \"mc2_stderr\": 0.015366351468634187\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7679558011049724,\n \"acc_stderr\": 0.011864149691827931\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.20621683093252463,\n \"acc_stderr\": 0.011144364089781436\n }\n}\n```", "repo_url": "https://huggingface.co/rizla/rizla54", "leaderboard_url": 
"https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_02T08_26_50.989261", "path": ["**/details_harness|arc:challenge|25_2024-02-02T08-26-50.989261.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-02T08-26-50.989261.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_02T08_26_50.989261", "path": ["**/details_harness|gsm8k|5_2024-02-02T08-26-50.989261.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-02T08-26-50.989261.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_02T08_26_50.989261", "path": ["**/details_harness|hellaswag|10_2024-02-02T08-26-50.989261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-02T08-26-50.989261.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_02T08_26_50.989261", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T08-26-50.989261.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T08-26-50.989261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T08-26-50.989261.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-02T08-26-50.989261.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T08-26-50.989261.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T08-26-50.989261.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_02T08_26_50.989261", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T08-26-50.989261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T08-26-50.989261.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_02T08_26_50.989261", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T08-26-50.989261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T08-26-50.989261.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_02T08_26_50.989261", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T08-26-50.989261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T08-26-50.989261.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_02T08_26_50.989261", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T08-26-50.989261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T08-26-50.989261.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_02T08_26_50.989261", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T08-26-50.989261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T08-26-50.989261.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_02T08_26_50.989261", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T08-26-50.989261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T08-26-50.989261.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_02T08_26_50.989261", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T08-26-50.989261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T08-26-50.989261.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_02_02T08_26_50.989261", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T08-26-50.989261.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T08-26-50.989261.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_02T08_26_50.989261", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T08-26-50.989261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T08-26-50.989261.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_02T08_26_50.989261", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T08-26-50.989261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T08-26-50.989261.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_02T08_26_50.989261", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T08-26-50.989261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T08-26-50.989261.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_02T08_26_50.989261", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T08-26-50.989261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T08-26-50.989261.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_02T08_26_50.989261", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T08-26-50.989261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T08-26-50.989261.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_02T08_26_50.989261", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T08-26-50.989261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T08-26-50.989261.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_02T08_26_50.989261", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T08-26-50.989261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T08-26-50.989261.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_02T08_26_50.989261", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T08-26-50.989261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T08-26-50.989261.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_02T08_26_50.989261", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T08-26-50.989261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T08-26-50.989261.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_02T08_26_50.989261", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T08-26-50.989261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T08-26-50.989261.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_02T08_26_50.989261", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T08-26-50.989261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T08-26-50.989261.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_02T08_26_50.989261", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T08-26-50.989261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T08-26-50.989261.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_02T08_26_50.989261", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T08-26-50.989261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T08-26-50.989261.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_02T08_26_50.989261", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T08-26-50.989261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T08-26-50.989261.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_02T08_26_50.989261", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T08-26-50.989261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T08-26-50.989261.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_02T08_26_50.989261", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T08-26-50.989261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T08-26-50.989261.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_02T08_26_50.989261", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T08-26-50.989261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T08-26-50.989261.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_02T08_26_50.989261", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T08-26-50.989261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T08-26-50.989261.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_02T08_26_50.989261", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T08-26-50.989261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T08-26-50.989261.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_02T08_26_50.989261", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T08-26-50.989261.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T08-26-50.989261.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_02T08_26_50.989261", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T08-26-50.989261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T08-26-50.989261.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_02T08_26_50.989261", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T08-26-50.989261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T08-26-50.989261.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_02T08_26_50.989261", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T08-26-50.989261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T08-26-50.989261.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_02T08_26_50.989261", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T08-26-50.989261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T08-26-50.989261.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_02T08_26_50.989261", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T08-26-50.989261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T08-26-50.989261.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_02T08_26_50.989261", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T08-26-50.989261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T08-26-50.989261.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_02T08_26_50.989261", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T08-26-50.989261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T08-26-50.989261.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_02T08_26_50.989261", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T08-26-50.989261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T08-26-50.989261.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_02T08_26_50.989261", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T08-26-50.989261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T08-26-50.989261.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_02_02T08_26_50.989261", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T08-26-50.989261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T08-26-50.989261.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_02T08_26_50.989261", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T08-26-50.989261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T08-26-50.989261.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_02T08_26_50.989261", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T08-26-50.989261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T08-26-50.989261.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_02T08_26_50.989261", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T08-26-50.989261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T08-26-50.989261.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_02T08_26_50.989261", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T08-26-50.989261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T08-26-50.989261.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_02T08_26_50.989261", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T08-26-50.989261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T08-26-50.989261.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_02T08_26_50.989261", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T08-26-50.989261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T08-26-50.989261.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_02T08_26_50.989261", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T08-26-50.989261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T08-26-50.989261.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_02T08_26_50.989261", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T08-26-50.989261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T08-26-50.989261.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_02T08_26_50.989261", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T08-26-50.989261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T08-26-50.989261.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_02T08_26_50.989261", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T08-26-50.989261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T08-26-50.989261.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_02T08_26_50.989261", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T08-26-50.989261.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T08-26-50.989261.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_02T08_26_50.989261", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T08-26-50.989261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T08-26-50.989261.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_02T08_26_50.989261", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T08-26-50.989261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T08-26-50.989261.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_02T08_26_50.989261", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T08-26-50.989261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T08-26-50.989261.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_02T08_26_50.989261", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T08-26-50.989261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T08-26-50.989261.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_02T08_26_50.989261", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T08-26-50.989261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T08-26-50.989261.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_02T08_26_50.989261", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T08-26-50.989261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T08-26-50.989261.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_02T08_26_50.989261", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T08-26-50.989261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T08-26-50.989261.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_02T08_26_50.989261", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T08-26-50.989261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T08-26-50.989261.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_02T08_26_50.989261", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T08-26-50.989261.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T08-26-50.989261.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_02T08_26_50.989261", "path": ["**/details_harness|winogrande|5_2024-02-02T08-26-50.989261.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-02-02T08-26-50.989261.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_02_02T08_26_50.989261", "path": ["results_2024-02-02T08-26-50.989261.parquet"]}, {"split": "latest", "path": 
["results_2024-02-02T08-26-50.989261.parquet"]}]}]}
2024-02-02T08:29:40+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of rizla/rizla54 Dataset automatically created during the evaluation run of model rizla/rizla54 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-02-02T08:26:50.989261 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
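For instance, a minimal loading sketch (the repository id below is an assumption following the leaderboard's usual `open-llm-leaderboard/details_<org>__<model>` naming convention, and the config name is just one example of the 63 available):

```python
from datasets import load_dataset

# Assumed repository id, derived from the leaderboard naming convention.
data = load_dataset(
    "open-llm-leaderboard/details_rizla__rizla54",
    "harness_winogrande_5",  # one of the 63 task configurations
    split="train",           # "train" always points to the latest results
)
```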
[ "# Dataset Card for Evaluation run of rizla/rizla54\n\n\n\nDataset automatically created during the evaluation run of model rizla/rizla54 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T08:26:50.989261(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of rizla/rizla54\n\n\n\nDataset automatically created during the evaluation run of model rizla/rizla54 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T08:26:50.989261(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
35a12f88da44871898dbc39695a347f8edf53d02
# Dataset of [Anime-based] Isla (Plastic Memories)

This is the dataset of [Anime-based] Isla (Plastic Memories), containing 130 images and their tags.

The core tags of this character are `long_hair, red_eyes, ahoge, twintails, white_hair`, which are pruned in this dataset.

Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...); the auto-crawling system is powered by [DeepGHS Team](https://github.com/deepghs) ([huggingface organization](https://huggingface.co/deepghs)).

## List of Packages

| Name | Images | Size | Download | Type | Description |
|:-----------------|---------:|:-----------|:-----------------------------------------------------------------------------------------------------------------------------|:-----------|:---------------------------------------------------------------------|
| raw | 130 | 171.52 MiB | [Download](https://huggingface.co/datasets/CyberHarem/isla_fanart_plasticmemories/resolve/main/dataset-raw.zip) | Waifuc-Raw | Raw data with meta information (min edge aligned to 1400 if larger). |
| 800 | 130 | 94.34 MiB | [Download](https://huggingface.co/datasets/CyberHarem/isla_fanart_plasticmemories/resolve/main/dataset-800.zip) | IMG+TXT | Dataset with the shorter side not exceeding 800 pixels. |
| stage3-p480-800 | 321 | 209.24 MiB | [Download](https://huggingface.co/datasets/CyberHarem/isla_fanart_plasticmemories/resolve/main/dataset-stage3-p480-800.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. |
| 1200 | 130 | 148.42 MiB | [Download](https://huggingface.co/datasets/CyberHarem/isla_fanart_plasticmemories/resolve/main/dataset-1200.zip) | IMG+TXT | Dataset with the shorter side not exceeding 1200 pixels. |
| stage3-p480-1200 | 321 | 294.47 MiB | [Download](https://huggingface.co/datasets/CyberHarem/isla_fanart_plasticmemories/resolve/main/dataset-stage3-p480-1200.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. |

(A download sketch for these packaged archives follows the cluster tables below.)

### Load Raw Dataset with Waifuc

We provide a raw dataset (including tagged images) for [waifuc](https://deepghs.github.io/waifuc/main/tutorials/installation/index.html) loading. If you need this, just run the following code:

```python
import os
import zipfile

from huggingface_hub import hf_hub_download
from waifuc.source import LocalSource

# download raw archive file
zip_file = hf_hub_download(
    repo_id='CyberHarem/isla_fanart_plasticmemories',
    repo_type='dataset',
    filename='dataset-raw.zip',
)

# extract files to your directory
dataset_dir = 'dataset_dir'
os.makedirs(dataset_dir, exist_ok=True)
with zipfile.ZipFile(zip_file, 'r') as zf:
    zf.extractall(dataset_dir)

# load the dataset with waifuc
source = LocalSource(dataset_dir)
for item in source:
    print(item.image, item.meta['filename'], item.meta['tags'])
```

## List of Clusters

List of tag clustering results; some outfits may be mined here.
### Raw Text Version | # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | Tags | |----:|----------:|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | 0 | 5 | ![](samples/0/clu0-sample0.png) | ![](samples/0/clu0-sample1.png) | ![](samples/0/clu0-sample2.png) | ![](samples/0/clu0-sample3.png) | ![](samples/0/clu0-sample4.png) | 1girl, boots, detached_sleeves, employee_uniform, necktie, solo, white_footwear, skirt, looking_at_viewer | | 1 | 9 | ![](samples/1/clu1-sample0.png) | ![](samples/1/clu1-sample1.png) | ![](samples/1/clu1-sample2.png) | ![](samples/1/clu1-sample3.png) | ![](samples/1/clu1-sample4.png) | 1girl, detached_sleeves, solo, necktie, blush, employee_uniform, looking_at_viewer, smile, sitting | | 2 | 7 | ![](samples/2/clu2-sample0.png) | ![](samples/2/clu2-sample1.png) | ![](samples/2/clu2-sample2.png) | ![](samples/2/clu2-sample3.png) | ![](samples/2/clu2-sample4.png) | bangs, closed_mouth, collared_shirt, detached_sleeves, hair_between_eyes, red_necktie, sleeveless_shirt, 1girl, blush, long_sleeves, looking_at_viewer, solo, black_shirt, very_long_hair, simple_background, smile, white_background, white_shirt, white_skirt, bare_shoulders, uniform, upper_body | | 3 | 5 | ![](samples/3/clu3-sample0.png) | ![](samples/3/clu3-sample1.png) | ![](samples/3/clu3-sample2.png) | ![](samples/3/clu3-sample3.png) | ![](samples/3/clu3-sample4.png) | 1girl, coat, plaid_scarf, blush, red_scarf, solo, hair_between_eyes, closed_eyes, facing_viewer, long_sleeves, scarf_over_mouth, upper_body | ### Table Version | # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | 1girl | boots | detached_sleeves | employee_uniform | necktie | solo | white_footwear | skirt | looking_at_viewer | blush | smile | sitting | bangs | closed_mouth | collared_shirt | hair_between_eyes | red_necktie | sleeveless_shirt | long_sleeves | black_shirt | very_long_hair | simple_background | white_background | white_shirt | white_skirt | bare_shoulders | uniform | upper_body | coat | plaid_scarf | red_scarf | closed_eyes | facing_viewer | scarf_over_mouth | |----:|----------:|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------|:--------|:-------------------|:-------------------|:----------|:-------|:-----------------|:--------|:--------------------|:--------|:--------|:----------|:--------|:---------------|:-----------------|:--------------------|:--------------|:-------------------|:---------------|:--------------|:-----------------|:--------------------|:-------------------|:--------------|:--------------|:-----------------|:----------|:-------------|:-------|:--------------|:------------|:--------------|:----------------|:-------------------| | 0 | 5 | ![](samples/0/clu0-sample0.png) | ![](samples/0/clu0-sample1.png) | ![](samples/0/clu0-sample2.png) | ![](samples/0/clu0-sample3.png) | ![](samples/0/clu0-sample4.png) | X | X | X | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | 1 | 9 | ![](samples/1/clu1-sample0.png) | ![](samples/1/clu1-sample1.png) | 
![](samples/1/clu1-sample2.png) | ![](samples/1/clu1-sample3.png) | ![](samples/1/clu1-sample4.png) | X | | X | X | X | X | | | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | 2 | 7 | ![](samples/2/clu2-sample0.png) | ![](samples/2/clu2-sample1.png) | ![](samples/2/clu2-sample2.png) | ![](samples/2/clu2-sample3.png) | ![](samples/2/clu2-sample4.png) | X | | X | | | X | | | X | X | X | | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | | | | | | | | 3 | 5 | ![](samples/3/clu3-sample0.png) | ![](samples/3/clu3-sample1.png) | ![](samples/3/clu3-sample2.png) | ![](samples/3/clu3-sample3.png) | ![](samples/3/clu3-sample4.png) | X | | | | | X | | | | X | | | | | | X | | | X | | | | | | | | | X | X | X | X | X | X | X |
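As referenced under the package list above, a minimal sketch for fetching one of the packaged IMG+TXT archives (this reuses the `hf_hub_download` call from the raw-archive example; `dataset-800.zip` is one of the filenames from the package table):

```python
import os
import zipfile

from huggingface_hub import hf_hub_download

# download one of the packaged archives (800px IMG+TXT variant)
zip_file = hf_hub_download(
    repo_id='CyberHarem/isla_fanart_plasticmemories',
    repo_type='dataset',
    filename='dataset-800.zip',
)

# extract image/text pairs to a local directory
dataset_dir = 'dataset_800'
os.makedirs(dataset_dir, exist_ok=True)
with zipfile.ZipFile(zip_file, 'r') as zf:
    zf.extractall(dataset_dir)
```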
CyberHarem/isla_fanart_plasticmemories
[ "task_categories:text-to-image", "size_categories:n<1K", "license:mit", "art", "not-for-all-audiences", "region:us" ]
2024-02-02T08:30:01+00:00
{"license": "mit", "size_categories": ["n<1K"], "task_categories": ["text-to-image"], "tags": ["art", "not-for-all-audiences"]}
2024-02-02T08:58:57+00:00
[]
[]
TAGS #task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us
Dataset of [Anime-based] Isla (Plastic Memories) ================================================ This is the dataset of [Anime-based] Isla (Plastic Memories), containing 130 images and their tags. The core tags of this character are 'long\_hair, red\_eyes, ahoge, twintails, white\_hair', which are pruned in this dataset. Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...); the auto-crawling system is powered by DeepGHS Team (huggingface organization). List of Packages ---------------- ### Load Raw Dataset with Waifuc We provide a raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code List of Clusters ---------------- List of tag clustering results; some outfits may be mined here. ### Raw Text Version ### Table Version
[ "### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.", "### Raw Text Version", "### Table Version" ]
[ "TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n", "### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.", "### Raw Text Version", "### Table Version" ]
fc56638f5efd25434826c7ee6a07cec808d1f0cd
# Synthetic Search Filters

These are possible search filters and their representations, generated with GPT-4 Turbo, for the given business/service categories and for the Russian language domain:

```
Artificial Intelligence and Machine Learning, Automotive, Automotive Dealerships, Banking Services, Books and Media, Cloud Computing Services, Cloud-based Development Environments, Collaborative Development Environments, Commercial Real Estate, Continuous Integration/Continuous Deployment, Credit Services, Customer Support Services, Customer Support and Feedback, Cybersecurity Software, Data Analytics and Business Intelligence, Dating Apps, Digital and Mobile Banking, Documentation and Knowledge Sharing, E-commerce Platforms, Eco-Friendly and Sustainable Properties, Educational Institutions, Electronics, Enterprise Software Development, Entertainment and Media Platforms, Event Planning Services, Fashion and Apparel, Financial Planning and Advisory, Food and Grocery, Game Development, Government Services, Health and Beauty, Healthcare Providers, Home and Garden, Image Stock Platforms, Insurance Services, International Real Estate, Internet of Things (IoT) Development, Investment Services, Issue Tracking and Bug Reporting, Job Recruitment Agencies, Land Sales and Acquisitions, Legal Services, Logistics and Supply Chain Management, Luxury and High-End Properties, Market Research Firms, Mobile App Development, Mortgage and Real Estate Services, Payment Processing, Pet Supplies, Professional Social Networks, Project Management Tools, Property Management, Real Estate Consulting, Real Estate Development, Real Estate Investment, Residential Real Estate, Restaurants and Food Delivery Services, Retail Stores (Online and Offline), Risk Management and Compliance, Social Networks, Sports and Outdoors, Task and Time Management, Taxation Services, Team Communication and Chat Tools, Telecommunication Companies, Toys and Games, Travel and Booking Agencies, Travelers and Consumers, User Interface/User Experience Design, Version Control Systems, Video Hosting and Portals, Web Development
```

This is a parsed version of [`EmbeddingStudio/synthetic-search-filters-ru-raw`](https://huggingface.co/datasets/EmbeddingStudio/synthetic-search-filters-ru-raw) in which each row is a unique filter-representation pair.

## Columns description

* category (type: Optional[str]) - business/service category name.
* category_description (type: Optional[str]) - longer description of the business/service.
* filter_name (type: Optional[str]) - meaningful name of the filter.
* representation_name (type: Optional[str]) - name of the filter representation.
* representation_type (type: Optional[str]) - python-like type of the representation value (str, int, float, bool).
* representation_enum (type: Optional[List[str]]) - if the representation is an enumeration, this is a list of possible values.
* representation_examples (type: List[Union[str, int, float]]) - examples of expected representation values.
* representation_pattern (type: Optional[str]) - if the representation is pattern-like (e.g. `dd/mm/YYYY`), this is the pattern to follow.

## What are representations?

It's easier to understand with an example. 
Imagine you have a filter named `Rating`; it can be represented as:

* Integer or float value on a 1-5 scale
* Integer or float value on a 1-10 scale
* Integer or float value on a 1-100 scale
* As the enumeration with values (*, **, ***, ****, *****)
* As the enumeration with values (bad, medium, good, the best)

## Train / test splitting principles

As we are trying to fine-tune an LLM to follow zero-shot query parsing instructions, we want to test:

* Ability to work well with an unseen domain
* Ability to work well with unseen filters
* Ability to work well with unseen queries

For these purposes:

1. We put 5 categories into the test split, completely separated from train: Automotive, Educational Institutions, Enterprise Software Development, Payment Processing, Professional Social Networks.
2. Out of each company category appearing in train, we also put aside / removed one filter and the queries related to it.

# How to use it

```python
from datasets import load_dataset

filters_dataset = load_dataset("EmbeddingStudio/synthetic-search-filters-ru")
```

The Embedding Studio team uses these filters to [generate queries and their parsed versions](EmbeddingStudio/query-parsing-instructions-saiga) for [IlyaGusev/saiga_mistral_7b_lora](https://huggingface.co/IlyaGusev/saiga_mistral_7b_lora) [fine-tuning to follow zero-shot search query parsing instructions](https://huggingface.co/EmbeddingStudio/query-parser-saiga-mistral-7b-lora).
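Since each row holds a single filter-representation pair, a small sketch like the following regroups all representation variants of each filter (the column names come from the description above, and `train_filters` is the split name recorded in this dataset's metadata):

```python
from collections import defaultdict

from datasets import load_dataset

filters_dataset = load_dataset("EmbeddingStudio/synthetic-search-filters-ru")

# Regroup rows so that each (category, filter) maps to all of its
# representation variants, mirroring the columns described above.
representations = defaultdict(list)
for row in filters_dataset["train_filters"]:
    representations[(row["category"], row["filter_name"])].append(
        {
            "name": row["representation_name"],
            "type": row["representation_type"],
            "enum": row["representation_enum"],
            "examples": row["representation_examples"],
            "pattern": row["representation_pattern"],
        }
    )

# e.g. inspect how many representations the first few filters have
for key, variants in list(representations.items())[:3]:
    print(key, "->", len(variants), "representations")
```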
EmbeddingStudio/synthetic-search-filters-ru
[ "task_categories:token-classification", "task_categories:text-generation", "size_categories:1K<n<10K", "language:ru", "language:en", "license:apache-2.0", "synthetic", "search-queries", "e-commerce", "online-shops", "travel-agencies", "educational-institutions-ai", "job-recruitment-automation", "banking-digital-services", "investment-ai-analysis", "insurance-tech-innovation", "financial-advisory-ai", "credit-services-automation", "payment-processing-tech", "mortgage-tech-solutions", "real-estate-digital-solutions", "taxation-tech-services", "risk-management-ai", "compliance-automation", "digital-banking-innovation", "mobile-banking-tech", "online-retail-tech", "offline-retail-automation", "automotive-dealership-tech", "restaurant-automation-tech", "food-delivery-ai", "entertainment-platforms-ai", "media-platforms-tech", "government-services-automation", "travel-tech-innovation", "consumer-analytics-ai", "logistics-tech-automation", "supply-chain-ai", "customer-support-tech", "market-research-ai", "mobile-app-dev-tech", "game-dev-ai", "cloud-computing-services", "data-analytics-ai", "business-intelligence-ai", "cybersecurity-software-tech", "ui-ux-design-ai", "iot-development-tech", "project-management-tools-ai", "version-control-systems-tech", "ci-cd-automation", "issue-tracking-ai", "bug-reporting-automation", "collaborative-dev-environments", "team-communication-tech", "task-time-management-ai", "customer-feedback-ai", "cloud-based-dev-tech", "image-stock-platforms-ai", "video-hosting-tech", "social-networks-ai", "professional-social-networks-ai", "dating-apps-tech", "region:us" ]
2024-02-02T08:33:22+00:00
{"language": ["ru", "en"], "license": "apache-2.0", "size_categories": ["1K<n<10K"], "task_categories": ["token-classification", "text-generation"], "pretty_name": "Synthetic Search Filters : Russian", "dataset_info": {"features": [{"name": "category", "dtype": "string"}, {"name": "category_description", "dtype": "string"}, {"name": "filter_name", "dtype": "string"}, {"name": "representation_name", "dtype": "string"}, {"name": "representation_type", "dtype": "string"}, {"name": "representation_enum", "sequence": "string"}, {"name": "representation_examples", "sequence": "string"}, {"name": "representation_pattern", "dtype": "string"}], "splits": [{"name": "train_filters", "num_bytes": 859114, "num_examples": 2749}, {"name": "test_filters", "num_bytes": 1039548, "num_examples": 3317}], "download_size": 314660, "dataset_size": 1898662}, "configs": [{"config_name": "default", "data_files": [{"split": "train_filters", "path": "data/train_filters-*"}, {"split": "test_filters", "path": "data/test_filters-*"}]}], "tags": ["synthetic", "search-queries", "e-commerce", "online-shops", "travel-agencies", "educational-institutions-ai", "job-recruitment-automation", "banking-digital-services", "investment-ai-analysis", "insurance-tech-innovation", "financial-advisory-ai", "credit-services-automation", "payment-processing-tech", "mortgage-tech-solutions", "real-estate-digital-solutions", "taxation-tech-services", "risk-management-ai", "compliance-automation", "digital-banking-innovation", "mobile-banking-tech", "online-retail-tech", "offline-retail-automation", "automotive-dealership-tech", "restaurant-automation-tech", "food-delivery-ai", "entertainment-platforms-ai", "media-platforms-tech", "government-services-automation", "travel-tech-innovation", "consumer-analytics-ai", "logistics-tech-automation", "supply-chain-ai", "customer-support-tech", "market-research-ai", "mobile-app-dev-tech", "game-dev-ai", "cloud-computing-services", "data-analytics-ai", "business-intelligence-ai", "cybersecurity-software-tech", "ui-ux-design-ai", "iot-development-tech", "project-management-tools-ai", "version-control-systems-tech", "ci-cd-automation", "issue-tracking-ai", "bug-reporting-automation", "collaborative-dev-environments", "team-communication-tech", "task-time-management-ai", "customer-feedback-ai", "cloud-based-dev-tech", "image-stock-platforms-ai", "video-hosting-tech", "social-networks-ai", "professional-social-networks-ai", "dating-apps-tech"]}
2024-02-02T11:42:35+00:00
[]
[ "ru", "en" ]
TAGS #task_categories-token-classification #task_categories-text-generation #size_categories-1K<n<10K #language-Russian #language-English #license-apache-2.0 #synthetic #search-queries #e-commerce #online-shops #travel-agencies #educational-institutions-ai #job-recruitment-automation #banking-digital-services #investment-ai-analysis #insurance-tech-innovation #financial-advisory-ai #credit-services-automation #payment-processing-tech #mortgage-tech-solutions #real-estate-digital-solutions #taxation-tech-services #risk-management-ai #compliance-automation #digital-banking-innovation #mobile-banking-tech #online-retail-tech #offline-retail-automation #automotive-dealership-tech #restaurant-automation-tech #food-delivery-ai #entertainment-platforms-ai #media-platforms-tech #government-services-automation #travel-tech-innovation #consumer-analytics-ai #logistics-tech-automation #supply-chain-ai #customer-support-tech #market-research-ai #mobile-app-dev-tech #game-dev-ai #cloud-computing-services #data-analytics-ai #business-intelligence-ai #cybersecurity-software-tech #ui-ux-design-ai #iot-development-tech #project-management-tools-ai #version-control-systems-tech #ci-cd-automation #issue-tracking-ai #bug-reporting-automation #collaborative-dev-environments #team-communication-tech #task-time-management-ai #customer-feedback-ai #cloud-based-dev-tech #image-stock-platforms-ai #video-hosting-tech #social-networks-ai #professional-social-networks-ai #dating-apps-tech #region-us
# Synthetic Search Filters

These are possible search filters and their representations, generated with GPT-4 Turbo, for the given business/service categories and for the Russian language domain:

This is a parsed version of 'EmbeddingStudio/synthetic-search-filters-ru-raw' in which each row is a unique filter-representation pair.

## Columns description

* category (type: Optional[str]) - business/service category name.
* category_description (type: Optional[str]) - longer description of the business/service.
* filter_name (type: Optional[str]) - meaningful name of the filter.
* representation_name (type: Optional[str]) - name of the filter representation.
* representation_type (type: Optional[str]) - python-like type of the representation value (str, int, float, bool).
* representation_enum (type: Optional[List[str]]) - if the representation is an enumeration, this is a list of possible values.
* representation_examples (type: List[Union[str, int, float]]) - examples of expected representation values.
* representation_pattern (type: Optional[str]) - if the representation is pattern-like (e.g. 'dd/mm/YYYY'), this is the pattern to follow.

## What are representations?

It's easier to understand with an example. Imagine you have a filter named 'Rating'; it can be represented as:
* Integer or float value on a 1-5 scale
* Integer or float value on a 1-10 scale
* Integer or float value on a 1-100 scale
* As the enumeration with values (*, **, ***, ****, *****)
* As the enumeration with values (bad, medium, good, the best)

## Train / test splitting principles

As we are trying to fine-tune an LLM to follow zero-shot query parsing instructions, we want to test:

* Ability to work well with an unseen domain
* Ability to work well with unseen filters
* Ability to work well with unseen queries

For these purposes:

1. We put 5 categories into the test split, completely separated from train: Automotive, Educational Institutions, Enterprise Software Development, Payment Processing, Professional Social Networks.
2. Out of each company category appearing in train, we also put aside / removed one filter and the queries related to it.

# How to use it

The Embedding Studio team uses these filters to generate queries and their parsed versions for IlyaGusev/saiga_mistral_7b_lora fine-tuning to follow zero-shot search query parsing instructions.
[ "# Synthetic Search Filters\n\nThis is generated with GPT-4 Turbo possible search filters and theirs representations for the given business/service categories and for the Russian language domain:\n\n\nThis is a parsed in the way each row is an unique pair filter - represantation version of 'EmbeddingStudio/synthetic-search-filters-ru-raw'.", "## Columns description\n\n* category (type: Optional[str]) - business/service category name.\n* category_description (type: Optional[str]) - longer description of business/service.\n* filter_name (type: Optional[str]) - meaningful name of filter.\n* representation_name (type: Optional[str]) - name of filter representation.\n* representation_type (type: Optional[str]) - python-like type of representation value (str, int, float, bool)\n* representation_enum (type: (Optional[List[str]])) - is represntation is an enumertation, this is a list of possible values.\n* representation_examples (type: List[Union[str, int, float]])) - exmaples of expected representation values.\n* representation_pattern (type: Optional[str]) - if representation is a pattern-like (e.g. 'dd/mm/YYYY'), this is a pattern to follow.", "## What are representations?\n\nIt's easier to understand with an exmaple. Imagine, you have a filter named 'Rating', so it can be represented as:\n* Integer or float value in 1-5 scale\n* Integer or float value in 1-10 scale\n* Integer or float value in 1-100 scale\n* As the enumeration with values (*, , *, , *)\n* As the enumeration with values (bad, medium, good, the best)", "## Train / test splitting principles\n\nAs we are trying to fine-tune LLM to follow zero-shot query parsing instructions, so we want to test:\n\n* Ability to work well with unseen domain\n* Ability to work well with unseen filters\n* Ability to work well with unseen queries\n\nFor these purposes we:\n\n1. We put into test split 5 categories, completely separared from train: Automotive, Educational Institutions, Enterprise Software Development, Payment Processing, Professional Social Networks.\n2. Also out of each appearing in train company categories, we put aside / removed one filter and queries related to it.", "# How to use it\n\n\nEmbedding Studio team uses this filters to generate queries and theirs parsed version for IlyaGusev/saiga_mistral_7b_lora fine-tuning to follow Zero-Shot search queries parsing instructions." ]
[ "TAGS\n#task_categories-token-classification #task_categories-text-generation #size_categories-1K<n<10K #language-Russian #language-English #license-apache-2.0 #synthetic #search-queries #e-commerce #online-shops #travel-agencies #educational-institutions-ai #job-recruitment-automation #banking-digital-services #investment-ai-analysis #insurance-tech-innovation #financial-advisory-ai #credit-services-automation #payment-processing-tech #mortgage-tech-solutions #real-estate-digital-solutions #taxation-tech-services #risk-management-ai #compliance-automation #digital-banking-innovation #mobile-banking-tech #online-retail-tech #offline-retail-automation #automotive-dealership-tech #restaurant-automation-tech #food-delivery-ai #entertainment-platforms-ai #media-platforms-tech #government-services-automation #travel-tech-innovation #consumer-analytics-ai #logistics-tech-automation #supply-chain-ai #customer-support-tech #market-research-ai #mobile-app-dev-tech #game-dev-ai #cloud-computing-services #data-analytics-ai #business-intelligence-ai #cybersecurity-software-tech #ui-ux-design-ai #iot-development-tech #project-management-tools-ai #version-control-systems-tech #ci-cd-automation #issue-tracking-ai #bug-reporting-automation #collaborative-dev-environments #team-communication-tech #task-time-management-ai #customer-feedback-ai #cloud-based-dev-tech #image-stock-platforms-ai #video-hosting-tech #social-networks-ai #professional-social-networks-ai #dating-apps-tech #region-us \n", "# Synthetic Search Filters\n\nThis is generated with GPT-4 Turbo possible search filters and theirs representations for the given business/service categories and for the Russian language domain:\n\n\nThis is a parsed in the way each row is an unique pair filter - represantation version of 'EmbeddingStudio/synthetic-search-filters-ru-raw'.", "## Columns description\n\n* category (type: Optional[str]) - business/service category name.\n* category_description (type: Optional[str]) - longer description of business/service.\n* filter_name (type: Optional[str]) - meaningful name of filter.\n* representation_name (type: Optional[str]) - name of filter representation.\n* representation_type (type: Optional[str]) - python-like type of representation value (str, int, float, bool)\n* representation_enum (type: (Optional[List[str]])) - is represntation is an enumertation, this is a list of possible values.\n* representation_examples (type: List[Union[str, int, float]])) - exmaples of expected representation values.\n* representation_pattern (type: Optional[str]) - if representation is a pattern-like (e.g. 'dd/mm/YYYY'), this is a pattern to follow.", "## What are representations?\n\nIt's easier to understand with an exmaple. Imagine, you have a filter named 'Rating', so it can be represented as:\n* Integer or float value in 1-5 scale\n* Integer or float value in 1-10 scale\n* Integer or float value in 1-100 scale\n* As the enumeration with values (*, , *, , *)\n* As the enumeration with values (bad, medium, good, the best)", "## Train / test splitting principles\n\nAs we are trying to fine-tune LLM to follow zero-shot query parsing instructions, so we want to test:\n\n* Ability to work well with unseen domain\n* Ability to work well with unseen filters\n* Ability to work well with unseen queries\n\nFor these purposes we:\n\n1. We put into test split 5 categories, completely separared from train: Automotive, Educational Institutions, Enterprise Software Development, Payment Processing, Professional Social Networks.\n2. 
Also out of each appearing in train company categories, we put aside / removed one filter and queries related to it.", "# How to use it\n\n\nEmbedding Studio team uses this filters to generate queries and theirs parsed version for IlyaGusev/saiga_mistral_7b_lora fine-tuning to follow Zero-Shot search queries parsing instructions." ]
c1c8a789adef3b8a2a13d4d7ae49bc9c68f0c487
# Synthetic Search Filters Raw: Russian

This is the raw version of the [EmbeddingStudio/synthetic-search-filters-ru dataset](https://huggingface.co/datasets/EmbeddingStudio/synthetic-search-filters-ru) for the Russian language domain.

These are possible search filters and their representations, generated with GPT-4 Turbo, for the given business/service categories:

```
Artificial Intelligence and Machine Learning, Automotive Dealerships, Banking Services, Books and Media, Cloud Computing Services, Cloud-based Development Environments, Collaborative Development Environments, Commercial Real Estate, Continuous Integration/Continuous Deployment, Credit Services, Customer Support Services, Customer Support and Feedback, Cybersecurity Software, Data Analytics and Business Intelligence, Dating Apps, Digital and Mobile Banking, Documentation and Knowledge Sharing, E-commerce Platforms, Eco-Friendly and Sustainable Properties, Electronics, Entertainment and Media Platforms, Event Planning Services, Fashion and Apparel, Financial Planning and Advisory, Food and Grocery, Game Development, Government Services, Health and Beauty, Healthcare Providers, Home and Garden, Image Stock Platforms, Insurance Services, International Real Estate, Internet of Things (IoT) Development, Investment Services, Issue Tracking and Bug Reporting, Job Recruitment Agencies, Land Sales and Acquisitions, Legal Services, Logistics and Supply Chain Management, Luxury and High-End Properties, Market Research Firms, Mobile App Development, Mortgage and Real Estate Services, Pet Supplies, Project Management Tools, Property Management, Real Estate Consulting, Real Estate Development, Real Estate Investment, Residential Real Estate, Restaurants and Food Delivery Services, Retail Stores (Online and Offline), Risk Management and Compliance, Social Networks, Sports and Outdoors, Task and Time Management, Taxation Services, Team Communication and Chat Tools, Telecommunication Companies, Toys and Games, Travel and Booking Agencies, Travelers and Consumers, User Interface/User Experience Design, Version Control Systems, Video Hosting and Portals, Web Development
```

## Columns description

* Category (type: str) - JSON-parsable name and description of a business / service.
* Filters (type: str) - JSON-parsable filters schema.

The filters schema is a JSON-readable line in the following format (we highly recommend using it):

List of filters (dict):
* Name - name of the filter (better to be meaningful).
* Representations - list of possible filter formats (dict):
  * Name - name of the representation (better to be meaningful).
  * Type - python base type (int, float, str, bool).
  * Examples - list of examples.
  * Enum - if a representation is an enumeration, provide a list of possible values; the LLM should map the parsed value into this list.
  * Pattern - if a representation is pattern-like (datetime, regexp, etc.), provide the pattern text in any format.

## What are representations?

It's easier to understand with an example. 
Imagine you have a filter named `Rating`; it can be represented as:

* Integer or float value on a 1-5 scale
* Integer or float value on a 1-10 scale
* Integer or float value on a 1-100 scale
* As the enumeration with values (*, **, ***, ****, *****)
* As the enumeration with values (bad, medium, good, the best)

## Train / test splitting principles

As we are trying to fine-tune an LLM to follow zero-shot query parsing instructions, we want to test:

* Ability to work well with an unseen domain
* Ability to work well with unseen filters
* Ability to work well with unseen queries

For these purposes:

1. We put 5 categories into the test split, completely separated from train: Automotive, Educational Institutions, Enterprise Software Development, Payment Processing, Professional Social Networks.
2. Out of each company category appearing in train, we also put aside / removed one filter and the queries related to it.

# How to use it

```python
import json

from datasets import load_dataset

filters_dataset = load_dataset("EmbeddingStudio/synthetic-search-filters-ru-raw")

train_filters_schema = dict()
for row in filters_dataset['train_filters_raw']:
    train_filters_schema[json.loads(row['Category'])['category']] = json.loads(row['Filters'])

test_filters_schema = dict()
for row in filters_dataset['test_filters_raw']:
    test_filters_schema[json.loads(row['Category'])['category']] = json.loads(row['Filters'])
```

The Embedding Studio team uses these filters to [generate queries and their parsed versions](EmbeddingStudio/query-parsing-instructions-saiga) for [IlyaGusev/saiga_mistral_7b_lora](https://huggingface.co/IlyaGusev/saiga_mistral_7b_lora) [fine-tuning to follow zero-shot search query parsing instructions](https://huggingface.co/EmbeddingStudio/query-parser-saiga-mistral-7b-lora).
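To make the schema concrete, here is a purely illustrative sketch of what one parsed `Filters` entry for the `Rating` example above could look like (the field names follow the schema described in this card; the representation names and values are invented for illustration and are not actual rows from the dataset):

```python
import json

# Hypothetical "Rating" filter, shaped like the schema described above.
rating_filter = {
    "Name": "Rating",
    "Representations": [
        {
            "Name": "OneToFiveScale",
            "Type": "float",
            "Examples": [3.5, 4.0, 5.0],
            "Enum": None,
            "Pattern": None,
        },
        {
            "Name": "Stars",
            "Type": "str",
            "Examples": ["***", "*****"],
            "Enum": ["*", "**", "***", "****", "*****"],
            "Pattern": None,
        },
    ],
}
print(json.dumps(rating_filter, indent=2))
```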
EmbeddingStudio/synthetic-search-filters-ru-raw
[ "task_categories:token-classification", "task_categories:text-generation", "size_categories:1K<n<10K", "language:ru", "language:en", "license:apache-2.0", "synthetic", "search-queries", "e-commerce", "online-shops", "travel-agencies", "educational-institutions-ai", "job-recruitment-automation", "banking-digital-services", "investment-ai-analysis", "insurance-tech-innovation", "financial-advisory-ai", "credit-services-automation", "payment-processing-tech", "mortgage-tech-solutions", "real-estate-digital-solutions", "taxation-tech-services", "risk-management-ai", "compliance-automation", "digital-banking-innovation", "mobile-banking-tech", "online-retail-tech", "offline-retail-automation", "automotive-dealership-tech", "restaurant-automation-tech", "food-delivery-ai", "entertainment-platforms-ai", "media-platforms-tech", "government-services-automation", "travel-tech-innovation", "consumer-analytics-ai", "logistics-tech-automation", "supply-chain-ai", "customer-support-tech", "market-research-ai", "mobile-app-dev-tech", "game-dev-ai", "cloud-computing-services", "data-analytics-ai", "business-intelligence-ai", "cybersecurity-software-tech", "ui-ux-design-ai", "iot-development-tech", "project-management-tools-ai", "version-control-systems-tech", "ci-cd-automation", "issue-tracking-ai", "bug-reporting-automation", "collaborative-dev-environments", "team-communication-tech", "task-time-management-ai", "customer-feedback-ai", "cloud-based-dev-tech", "image-stock-platforms-ai", "video-hosting-tech", "social-networks-ai", "professional-social-networks-ai", "dating-apps-tech", "region:us" ]
2024-02-02T08:35:31+00:00
{"language": ["ru", "en"], "license": "apache-2.0", "size_categories": ["1K<n<10K"], "task_categories": ["token-classification", "text-generation"], "pretty_name": "Synthetic Search Filters Raw: Russian", "dataset_info": {"features": [{"name": "Category", "dtype": "string"}, {"name": "Filters", "sequence": "string"}], "splits": [{"name": "train_filters_raw", "num_bytes": 848000, "num_examples": 69}, {"name": "test_filters_raw", "num_bytes": 1022352, "num_examples": 74}], "download_size": 301762, "dataset_size": 1870352}, "configs": [{"config_name": "default", "data_files": [{"split": "train_filters_raw", "path": "data/train_filters_raw-*"}, {"split": "test_filters_raw", "path": "data/test_filters_raw-*"}]}], "tags": ["synthetic", "search-queries", "e-commerce", "online-shops", "travel-agencies", "educational-institutions-ai", "job-recruitment-automation", "banking-digital-services", "investment-ai-analysis", "insurance-tech-innovation", "financial-advisory-ai", "credit-services-automation", "payment-processing-tech", "mortgage-tech-solutions", "real-estate-digital-solutions", "taxation-tech-services", "risk-management-ai", "compliance-automation", "digital-banking-innovation", "mobile-banking-tech", "online-retail-tech", "offline-retail-automation", "automotive-dealership-tech", "restaurant-automation-tech", "food-delivery-ai", "entertainment-platforms-ai", "media-platforms-tech", "government-services-automation", "travel-tech-innovation", "consumer-analytics-ai", "logistics-tech-automation", "supply-chain-ai", "customer-support-tech", "market-research-ai", "mobile-app-dev-tech", "game-dev-ai", "cloud-computing-services", "data-analytics-ai", "business-intelligence-ai", "cybersecurity-software-tech", "ui-ux-design-ai", "iot-development-tech", "project-management-tools-ai", "version-control-systems-tech", "ci-cd-automation", "issue-tracking-ai", "bug-reporting-automation", "collaborative-dev-environments", "team-communication-tech", "task-time-management-ai", "customer-feedback-ai", "cloud-based-dev-tech", "image-stock-platforms-ai", "video-hosting-tech", "social-networks-ai", "professional-social-networks-ai", "dating-apps-tech"]}
2024-02-02T10:23:10+00:00
[]
[ "ru", "en" ]
TAGS #task_categories-token-classification #task_categories-text-generation #size_categories-1K<n<10K #language-Russian #language-English #license-apache-2.0 #synthetic #search-queries #e-commerce #online-shops #travel-agencies #educational-institutions-ai #job-recruitment-automation #banking-digital-services #investment-ai-analysis #insurance-tech-innovation #financial-advisory-ai #credit-services-automation #payment-processing-tech #mortgage-tech-solutions #real-estate-digital-solutions #taxation-tech-services #risk-management-ai #compliance-automation #digital-banking-innovation #mobile-banking-tech #online-retail-tech #offline-retail-automation #automotive-dealership-tech #restaurant-automation-tech #food-delivery-ai #entertainment-platforms-ai #media-platforms-tech #government-services-automation #travel-tech-innovation #consumer-analytics-ai #logistics-tech-automation #supply-chain-ai #customer-support-tech #market-research-ai #mobile-app-dev-tech #game-dev-ai #cloud-computing-services #data-analytics-ai #business-intelligence-ai #cybersecurity-software-tech #ui-ux-design-ai #iot-development-tech #project-management-tools-ai #version-control-systems-tech #ci-cd-automation #issue-tracking-ai #bug-reporting-automation #collaborative-dev-environments #team-communication-tech #task-time-management-ai #customer-feedback-ai #cloud-based-dev-tech #image-stock-platforms-ai #video-hosting-tech #social-networks-ai #professional-social-networks-ai #dating-apps-tech #region-us
# Synthetic Search Filters Raw: Russian This is the raw version of the EmbeddingStudio/synthetic-search-filters-ru dataset for the Russian language domain. It contains possible search filters and their representations, generated with GPT-4 Turbo, for the given business/service categories: ## Columns description * Category (type: str) - JSON-parsable name and description of a business / service. * Filters (type: str) - JSON-parsable filters schema. The filters schema is a JSON-readable line in the following format (we highly recommend using it): List of filters (dict): * Name - name of the filter (better to be meaningful). * Representations - list of possible filter formats (dict): * Name - name of the representation (better to be meaningful). * Type - Python base type (int, float, str, bool). * Examples - list of examples. * Enum - if a representation is an enumeration, provide a list of possible values; the LLM should map the parsed value into this list. * Pattern - if a representation is pattern-like (datetime, regexp, etc.), provide a pattern text in any format. ## What are representations? It's easier to understand with an example. Imagine you have a filter named 'Rating'; it can be represented as: * An integer or float value on a 1-5 scale * An integer or float value on a 1-10 scale * An integer or float value on a 1-100 scale * An enumeration with the values (*, **, ***, ****, *****) * An enumeration with the values (bad, medium, good, the best) ## Train / test splitting principles As we are trying to fine-tune an LLM to follow zero-shot query parsing instructions, we want to test: * The ability to work well with an unseen domain * The ability to work well with unseen filters * The ability to work well with unseen queries For these purposes: 1. We put into the test split 5 categories completely separated from train: Automotive, Educational Institutions, Enterprise Software Development, Payment Processing, Professional Social Networks. 2. For each company category appearing in train, we also put aside / removed one filter and the queries related to it. # How to use it The Embedding Studio team uses these filters to generate queries and their parsed versions for fine-tuning IlyaGusev/saiga_mistral_7b_lora to follow zero-shot search query parsing instructions.
[ "# Synthetic Search Filters Raw: Russian\n\nThis is the raw version of EmbeddingStudio/synthetic-search-filters-ru dataset for Russian language domain.\n\nThis is generated with GPT-4 Turbo possible search filters and theirs representations for the given business/service categories:", "## Columns description\n\n* Category (type: str) - JSON parsable name and description of a business / service.\n* Filters (type: str) - JSON parsable filters schema\n\nFilters schema is JSON-readable line in the format (we highly recommend you to use it): \nList of filters (dict):\n\n* Name - name of filter (better to be meaningful).\n* Representations - list of possible filter formats (dict):\n * Name - name of representation (better to be meaningful).\n * Type - python base type (int, float, str, bool).\n * Examples - list of examples.\n * Enum - if a representation is enumeration, provide a list of possible values, LLM should map parsed value into this list.\n * Pattern - if a representation is pattern-like (datetime, regexp, etc.) provide a pattern text in any format.", "## What are representations?\n\nIt's easier to understand with an exmaple. Imagine, you have a filter named 'Rating', so it can be represented as:\n* Integer or float value in 1-5 scale\n* Integer or float value in 1-10 scale\n* Integer or float value in 1-100 scale\n* As the enumeration with values (*, , *, , *)\n* As the enumeration with values (bad, medium, good, the best)", "## Train / test splitting principles\n\nAs we are trying to fine-tune LLM to follow zero-shot query parsing instructions, so we want to test:\n\n* Ability to work well with unseen domain\n* Ability to work well with unseen filters\n* Ability to work well with unseen queries\n\nFor these purposes we:\n\n1. We put into test split 5 categories, completely separared from train: Automotive, Educational Institutions, Enterprise Software Development, Payment Processing, Professional Social Networks.\n2. Also out of each appearing in train company categories, we put aside / removed one filter and queries related to it.", "# How to use it\n\n\nEmbedding Studio team uses this filters to generate queries and theirs parsed version for IlyaGusev/saiga_mistral_7b_lora fine-tuning to follow Zero-Shot search queries parsing instructions." ]
[ "TAGS\n#task_categories-token-classification #task_categories-text-generation #size_categories-1K<n<10K #language-Russian #language-English #license-apache-2.0 #synthetic #search-queries #e-commerce #online-shops #travel-agencies #educational-institutions-ai #job-recruitment-automation #banking-digital-services #investment-ai-analysis #insurance-tech-innovation #financial-advisory-ai #credit-services-automation #payment-processing-tech #mortgage-tech-solutions #real-estate-digital-solutions #taxation-tech-services #risk-management-ai #compliance-automation #digital-banking-innovation #mobile-banking-tech #online-retail-tech #offline-retail-automation #automotive-dealership-tech #restaurant-automation-tech #food-delivery-ai #entertainment-platforms-ai #media-platforms-tech #government-services-automation #travel-tech-innovation #consumer-analytics-ai #logistics-tech-automation #supply-chain-ai #customer-support-tech #market-research-ai #mobile-app-dev-tech #game-dev-ai #cloud-computing-services #data-analytics-ai #business-intelligence-ai #cybersecurity-software-tech #ui-ux-design-ai #iot-development-tech #project-management-tools-ai #version-control-systems-tech #ci-cd-automation #issue-tracking-ai #bug-reporting-automation #collaborative-dev-environments #team-communication-tech #task-time-management-ai #customer-feedback-ai #cloud-based-dev-tech #image-stock-platforms-ai #video-hosting-tech #social-networks-ai #professional-social-networks-ai #dating-apps-tech #region-us \n", "# Synthetic Search Filters Raw: Russian\n\nThis is the raw version of EmbeddingStudio/synthetic-search-filters-ru dataset for Russian language domain.\n\nThis is generated with GPT-4 Turbo possible search filters and theirs representations for the given business/service categories:", "## Columns description\n\n* Category (type: str) - JSON parsable name and description of a business / service.\n* Filters (type: str) - JSON parsable filters schema\n\nFilters schema is JSON-readable line in the format (we highly recommend you to use it): \nList of filters (dict):\n\n* Name - name of filter (better to be meaningful).\n* Representations - list of possible filter formats (dict):\n * Name - name of representation (better to be meaningful).\n * Type - python base type (int, float, str, bool).\n * Examples - list of examples.\n * Enum - if a representation is enumeration, provide a list of possible values, LLM should map parsed value into this list.\n * Pattern - if a representation is pattern-like (datetime, regexp, etc.) provide a pattern text in any format.", "## What are representations?\n\nIt's easier to understand with an exmaple. Imagine, you have a filter named 'Rating', so it can be represented as:\n* Integer or float value in 1-5 scale\n* Integer or float value in 1-10 scale\n* Integer or float value in 1-100 scale\n* As the enumeration with values (*, , *, , *)\n* As the enumeration with values (bad, medium, good, the best)", "## Train / test splitting principles\n\nAs we are trying to fine-tune LLM to follow zero-shot query parsing instructions, so we want to test:\n\n* Ability to work well with unseen domain\n* Ability to work well with unseen filters\n* Ability to work well with unseen queries\n\nFor these purposes we:\n\n1. We put into test split 5 categories, completely separared from train: Automotive, Educational Institutions, Enterprise Software Development, Payment Processing, Professional Social Networks.\n2. 
Also out of each appearing in train company categories, we put aside / removed one filter and queries related to it.", "# How to use it\n\n\nEmbedding Studio team uses this filters to generate queries and theirs parsed version for IlyaGusev/saiga_mistral_7b_lora fine-tuning to follow Zero-Shot search queries parsing instructions." ]
b3b4b0c7d2d89086dbfeee4b8974e9bfa43ecc6e
# Dataset Card for Evaluation run of JaeyeonKang/CCK_Gony_v3.2

<!-- Provide a quick summary of the dataset. -->

Dataset automatically created during the evaluation run of model [JaeyeonKang/CCK_Gony_v3.2](https://huggingface.co/JaeyeonKang/CCK_Gony_v3.2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:

```python
from datasets import load_dataset

data = load_dataset("open-llm-leaderboard/details_JaeyeonKang__CCK_Gony_v3.2",
                    "harness_winogrande_5",
                    split="train")
```

## Latest results

These are the [latest results from run 2024-02-02T09:25:22.859036](https://huggingface.co/datasets/open-llm-leaderboard/details_JaeyeonKang__CCK_Gony_v3.2/blob/main/results_2024-02-02T09-25-22.859036.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval):

```python
{ "all": { "acc": 0.7047084782513622, "acc_stderr": 0.030315860999845592, "acc_norm": 0.7093203962624949, "acc_norm_stderr": 0.030894611332654892, "mc1": 0.4418604651162791, "mc1_stderr": 0.017384767478986218, "mc2": 0.5881032370657441, "mc2_stderr": 0.015065851872175183 }, "harness|arc:challenge|25": { "acc": 0.6467576791808873, "acc_stderr": 0.013967822714840055, "acc_norm": 0.6945392491467577, "acc_norm_stderr": 0.013460080478002508 }, "harness|hellaswag|10": { "acc": 0.6735710017924716, "acc_stderr": 0.004679479763516775, "acc_norm": 0.8691495717984465, "acc_norm_stderr": 0.003365474860676741 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.35, "acc_stderr": 0.04793724854411021, "acc_norm": 0.35, "acc_norm_stderr": 0.04793724854411021 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6888888888888889, "acc_stderr": 0.039992628766177214, "acc_norm": 0.6888888888888889, "acc_norm_stderr": 0.039992628766177214 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.7960526315789473, "acc_stderr": 0.0327900040631005, "acc_norm": 0.7960526315789473, "acc_norm_stderr": 0.0327900040631005 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.72, "acc_stderr": 0.04512608598542127, "acc_norm": 0.72, "acc_norm_stderr": 0.04512608598542127 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.769811320754717, "acc_stderr": 0.02590789712240817, "acc_norm": 0.769811320754717, "acc_norm_stderr": 0.02590789712240817 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.8541666666666666, "acc_stderr": 0.029514245964291766, "acc_norm": 0.8541666666666666, "acc_norm_stderr": 0.029514245964291766 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.52, "acc_stderr": 0.050211673156867795, "acc_norm": 0.52, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.63, "acc_stderr": 0.048523658709391, "acc_norm": 0.63, "acc_norm_stderr": 0.048523658709391 },
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.43, "acc_stderr": 0.04975698519562428, "acc_norm": 0.43, "acc_norm_stderr": 0.04975698519562428 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.7456647398843931, "acc_stderr": 0.03320556443085569, "acc_norm": 0.7456647398843931, "acc_norm_stderr": 0.03320556443085569 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4019607843137255, "acc_stderr": 0.04878608714466996, "acc_norm": 0.4019607843137255, "acc_norm_stderr": 0.04878608714466996 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.8, "acc_stderr": 0.04020151261036845, "acc_norm": 0.8, "acc_norm_stderr": 0.04020151261036845 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.6553191489361702, "acc_stderr": 0.03106898596312215, "acc_norm": 0.6553191489361702, "acc_norm_stderr": 0.03106898596312215 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5789473684210527, "acc_stderr": 0.046446020912223177, "acc_norm": 0.5789473684210527, "acc_norm_stderr": 0.046446020912223177 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.6689655172413793, "acc_stderr": 0.03921545312467122, "acc_norm": 0.6689655172413793, "acc_norm_stderr": 0.03921545312467122 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.49206349206349204, "acc_stderr": 0.025748065871673272, "acc_norm": 0.49206349206349204, "acc_norm_stderr": 0.025748065871673272 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.5158730158730159, "acc_stderr": 0.044698818540726076, "acc_norm": 0.5158730158730159, "acc_norm_stderr": 0.044698818540726076 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.38, "acc_stderr": 0.04878317312145632, "acc_norm": 0.38, "acc_norm_stderr": 0.04878317312145632 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.8548387096774194, "acc_stderr": 0.020039563628053283, "acc_norm": 0.8548387096774194, "acc_norm_stderr": 0.020039563628053283 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5911330049261084, "acc_stderr": 0.034590588158832314, "acc_norm": 0.5911330049261084, "acc_norm_stderr": 0.034590588158832314 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.75, "acc_stderr": 0.04351941398892446, "acc_norm": 0.75, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.793939393939394, "acc_stderr": 0.0315841532404771, "acc_norm": 0.793939393939394, "acc_norm_stderr": 0.0315841532404771 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8585858585858586, "acc_stderr": 0.024825909793343336, "acc_norm": 0.8585858585858586, "acc_norm_stderr": 0.024825909793343336 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9481865284974094, "acc_stderr": 0.01599622932024412, "acc_norm": 0.9481865284974094, "acc_norm_stderr": 0.01599622932024412 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.7153846153846154, "acc_stderr": 0.0228783227997063, "acc_norm": 0.7153846153846154, "acc_norm_stderr": 0.0228783227997063 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.35555555555555557, "acc_stderr": 0.0291857149498574, "acc_norm": 0.35555555555555557, "acc_norm_stderr": 0.0291857149498574 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.7983193277310925, "acc_stderr": 0.026064313406304534, "acc_norm": 0.7983193277310925, "acc_norm_stderr": 0.026064313406304534 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.4304635761589404, "acc_stderr": 0.040428099613956346, 
"acc_norm": 0.4304635761589404, "acc_norm_stderr": 0.040428099613956346 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8825688073394495, "acc_stderr": 0.013802780227377342, "acc_norm": 0.8825688073394495, "acc_norm_stderr": 0.013802780227377342 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.6064814814814815, "acc_stderr": 0.03331747876370312, "acc_norm": 0.6064814814814815, "acc_norm_stderr": 0.03331747876370312 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8725490196078431, "acc_stderr": 0.02340553048084631, "acc_norm": 0.8725490196078431, "acc_norm_stderr": 0.02340553048084631 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8607594936708861, "acc_stderr": 0.022535526352692705, "acc_norm": 0.8607594936708861, "acc_norm_stderr": 0.022535526352692705 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.757847533632287, "acc_stderr": 0.028751392398694755, "acc_norm": 0.757847533632287, "acc_norm_stderr": 0.028751392398694755 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7862595419847328, "acc_stderr": 0.0359546161177469, "acc_norm": 0.7862595419847328, "acc_norm_stderr": 0.0359546161177469 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8760330578512396, "acc_stderr": 0.030083098716035202, "acc_norm": 0.8760330578512396, "acc_norm_stderr": 0.030083098716035202 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8148148148148148, "acc_stderr": 0.03755265865037182, "acc_norm": 0.8148148148148148, "acc_norm_stderr": 0.03755265865037182 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.803680981595092, "acc_stderr": 0.031207970394709218, "acc_norm": 0.803680981595092, "acc_norm_stderr": 0.031207970394709218 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.5982142857142857, "acc_stderr": 0.04653333146973647, "acc_norm": 0.5982142857142857, "acc_norm_stderr": 0.04653333146973647 }, "harness|hendrycksTest-management|5": { "acc": 0.8446601941747572, "acc_stderr": 0.035865947385739734, "acc_norm": 0.8446601941747572, "acc_norm_stderr": 0.035865947385739734 }, "harness|hendrycksTest-marketing|5": { "acc": 0.9273504273504274, "acc_stderr": 0.01700436856813235, "acc_norm": 0.9273504273504274, "acc_norm_stderr": 0.01700436856813235 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.78, "acc_stderr": 0.04163331998932262, "acc_norm": 0.78, "acc_norm_stderr": 0.04163331998932262 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8812260536398467, "acc_stderr": 0.011569134791715655, "acc_norm": 0.8812260536398467, "acc_norm_stderr": 0.011569134791715655 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7687861271676301, "acc_stderr": 0.02269865716785571, "acc_norm": 0.7687861271676301, "acc_norm_stderr": 0.02269865716785571 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.464804469273743, "acc_stderr": 0.01668102093107665, "acc_norm": 0.464804469273743, "acc_norm_stderr": 0.01668102093107665 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.8202614379084967, "acc_stderr": 0.021986032182064148, "acc_norm": 0.8202614379084967, "acc_norm_stderr": 0.021986032182064148 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.8006430868167203, "acc_stderr": 0.022691033780549656, "acc_norm": 0.8006430868167203, "acc_norm_stderr": 0.022691033780549656 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.8240740740740741, "acc_stderr": 0.021185893615225153, "acc_norm": 0.8240740740740741, "acc_norm_stderr": 0.021185893615225153 }, "harness|hendrycksTest-professional_accounting|5": { 
"acc": 0.5319148936170213, "acc_stderr": 0.02976667507587387, "acc_norm": 0.5319148936170213, "acc_norm_stderr": 0.02976667507587387 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.5378096479791395, "acc_stderr": 0.012733671880342506, "acc_norm": 0.5378096479791395, "acc_norm_stderr": 0.012733671880342506 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.8014705882352942, "acc_stderr": 0.024231013370541073, "acc_norm": 0.8014705882352942, "acc_norm_stderr": 0.024231013370541073 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.7532679738562091, "acc_stderr": 0.0174408203674025, "acc_norm": 0.7532679738562091, "acc_norm_stderr": 0.0174408203674025 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6909090909090909, "acc_stderr": 0.044262946482000985, "acc_norm": 0.6909090909090909, "acc_norm_stderr": 0.044262946482000985 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7510204081632653, "acc_stderr": 0.027682979522960234, "acc_norm": 0.7510204081632653, "acc_norm_stderr": 0.027682979522960234 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8756218905472637, "acc_stderr": 0.023335401790166327, "acc_norm": 0.8756218905472637, "acc_norm_stderr": 0.023335401790166327 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.89, "acc_stderr": 0.03144660377352203, "acc_norm": 0.89, "acc_norm_stderr": 0.03144660377352203 }, "harness|hendrycksTest-virology|5": { "acc": 0.5060240963855421, "acc_stderr": 0.03892212195333045, "acc_norm": 0.5060240963855421, "acc_norm_stderr": 0.03892212195333045 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8830409356725146, "acc_stderr": 0.02464806896136615, "acc_norm": 0.8830409356725146, "acc_norm_stderr": 0.02464806896136615 }, "harness|truthfulqa:mc|0": { "mc1": 0.4418604651162791, "mc1_stderr": 0.017384767478986218, "mc2": 0.5881032370657441, "mc2_stderr": 0.015065851872175183 }, "harness|winogrande|5": { "acc": 0.8097868981846882, "acc_stderr": 0.01103033579861744 }, "harness|gsm8k|5": { "acc": 0.5708870356330553, "acc_stderr": 0.013633369425647236 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
-->

#### Data Collection and Processing

<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->

[More Information Needed]

#### Who are the source data producers?

<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->

[More Information Needed]

### Annotations [optional]

<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->

#### Annotation process

<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->

[More Information Needed]

#### Who are the annotators?

<!-- This section describes the people or systems who created the annotations. -->

[More Information Needed]

#### Personal and Sensitive Information

<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->

[More Information Needed]

## Bias, Risks, and Limitations

<!-- This section is meant to convey both technical and sociotechnical limitations. -->

[More Information Needed]

### Recommendations

<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->

Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.

## Citation [optional]

<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->

**BibTeX:**

[More Information Needed]

**APA:**

[More Information Needed]

## Glossary [optional]

<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->

[More Information Needed]

## More Information [optional]

[More Information Needed]

## Dataset Card Authors [optional]

[More Information Needed]

## Dataset Card Contact

[More Information Needed]
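To complement the loading instructions above, here is a small sketch of reading the aggregated metrics. It assumes the "results" configuration described in this card; the exact record layout inside that configuration may differ from run to run:

```python
from datasets import load_dataset

# Load the aggregated "results" configuration; per the card, the "train"
# split always points to the latest evaluation run.
results = load_dataset(
    "open-llm-leaderboard/details_JaeyeonKang__CCK_Gony_v3.2",
    "results",
    split="train",
)

# Print the first record, which holds the aggregated metrics
# (a sketch: field names may differ between harness versions).
print(results[0])
```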
open-llm-leaderboard/details_JaeyeonKang__CCK_Gony_v3.2
[ "region:us" ]
2024-02-02T08:39:32+00:00
{"pretty_name": "Evaluation run of JaeyeonKang/CCK_Gony_v3.2", "dataset_summary": "Dataset automatically created during the evaluation run of model [JaeyeonKang/CCK_Gony_v3.2](https://huggingface.co/JaeyeonKang/CCK_Gony_v3.2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_JaeyeonKang__CCK_Gony_v3.2\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-02T09:25:22.859036](https://huggingface.co/datasets/open-llm-leaderboard/details_JaeyeonKang__CCK_Gony_v3.2/blob/main/results_2024-02-02T09-25-22.859036.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.7047084782513622,\n \"acc_stderr\": 0.030315860999845592,\n \"acc_norm\": 0.7093203962624949,\n \"acc_norm_stderr\": 0.030894611332654892,\n \"mc1\": 0.4418604651162791,\n \"mc1_stderr\": 0.017384767478986218,\n \"mc2\": 0.5881032370657441,\n \"mc2_stderr\": 0.015065851872175183\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6467576791808873,\n \"acc_stderr\": 0.013967822714840055,\n \"acc_norm\": 0.6945392491467577,\n \"acc_norm_stderr\": 0.013460080478002508\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6735710017924716,\n \"acc_stderr\": 0.004679479763516775,\n \"acc_norm\": 0.8691495717984465,\n \"acc_norm_stderr\": 0.003365474860676741\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.04793724854411021,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.04793724854411021\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6888888888888889,\n \"acc_stderr\": 0.039992628766177214,\n \"acc_norm\": 0.6888888888888889,\n \"acc_norm_stderr\": 0.039992628766177214\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.7960526315789473,\n \"acc_stderr\": 0.0327900040631005,\n \"acc_norm\": 0.7960526315789473,\n \"acc_norm_stderr\": 0.0327900040631005\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.04512608598542127,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.04512608598542127\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.769811320754717,\n \"acc_stderr\": 0.02590789712240817,\n \"acc_norm\": 0.769811320754717,\n \"acc_norm_stderr\": 0.02590789712240817\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.8541666666666666,\n \"acc_stderr\": 0.029514245964291766,\n \"acc_norm\": 0.8541666666666666,\n \"acc_norm_stderr\": 0.029514245964291766\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.52,\n \"acc_stderr\": 
0.050211673156867795,\n \"acc_norm\": 0.52,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.63,\n \"acc_stderr\": 0.048523658709391,\n \"acc_norm\": 0.63,\n \"acc_norm_stderr\": 0.048523658709391\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.43,\n \"acc_stderr\": 0.04975698519562428,\n \"acc_norm\": 0.43,\n \"acc_norm_stderr\": 0.04975698519562428\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.7456647398843931,\n \"acc_stderr\": 0.03320556443085569,\n \"acc_norm\": 0.7456647398843931,\n \"acc_norm_stderr\": 0.03320556443085569\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4019607843137255,\n \"acc_stderr\": 0.04878608714466996,\n \"acc_norm\": 0.4019607843137255,\n \"acc_norm_stderr\": 0.04878608714466996\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.8,\n \"acc_stderr\": 0.04020151261036845,\n \"acc_norm\": 0.8,\n \"acc_norm_stderr\": 0.04020151261036845\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.6553191489361702,\n \"acc_stderr\": 0.03106898596312215,\n \"acc_norm\": 0.6553191489361702,\n \"acc_norm_stderr\": 0.03106898596312215\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5789473684210527,\n \"acc_stderr\": 0.046446020912223177,\n \"acc_norm\": 0.5789473684210527,\n \"acc_norm_stderr\": 0.046446020912223177\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.6689655172413793,\n \"acc_stderr\": 0.03921545312467122,\n \"acc_norm\": 0.6689655172413793,\n \"acc_norm_stderr\": 0.03921545312467122\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.49206349206349204,\n \"acc_stderr\": 0.025748065871673272,\n \"acc_norm\": 0.49206349206349204,\n \"acc_norm_stderr\": 0.025748065871673272\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.5158730158730159,\n \"acc_stderr\": 0.044698818540726076,\n \"acc_norm\": 0.5158730158730159,\n \"acc_norm_stderr\": 0.044698818540726076\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.38,\n \"acc_stderr\": 0.04878317312145632,\n \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.04878317312145632\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.8548387096774194,\n \"acc_stderr\": 0.020039563628053283,\n \"acc_norm\": 0.8548387096774194,\n \"acc_norm_stderr\": 0.020039563628053283\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5911330049261084,\n \"acc_stderr\": 0.034590588158832314,\n \"acc_norm\": 0.5911330049261084,\n \"acc_norm_stderr\": 0.034590588158832314\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.793939393939394,\n \"acc_stderr\": 0.0315841532404771,\n \"acc_norm\": 0.793939393939394,\n \"acc_norm_stderr\": 0.0315841532404771\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8585858585858586,\n \"acc_stderr\": 0.024825909793343336,\n \"acc_norm\": 0.8585858585858586,\n \"acc_norm_stderr\": 0.024825909793343336\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9481865284974094,\n \"acc_stderr\": 0.01599622932024412,\n \"acc_norm\": 0.9481865284974094,\n \"acc_norm_stderr\": 0.01599622932024412\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": 
{\n \"acc\": 0.7153846153846154,\n \"acc_stderr\": 0.0228783227997063,\n \"acc_norm\": 0.7153846153846154,\n \"acc_norm_stderr\": 0.0228783227997063\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.35555555555555557,\n \"acc_stderr\": 0.0291857149498574,\n \"acc_norm\": 0.35555555555555557,\n \"acc_norm_stderr\": 0.0291857149498574\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.7983193277310925,\n \"acc_stderr\": 0.026064313406304534,\n \"acc_norm\": 0.7983193277310925,\n \"acc_norm_stderr\": 0.026064313406304534\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.4304635761589404,\n \"acc_stderr\": 0.040428099613956346,\n \"acc_norm\": 0.4304635761589404,\n \"acc_norm_stderr\": 0.040428099613956346\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8825688073394495,\n \"acc_stderr\": 0.013802780227377342,\n \"acc_norm\": 0.8825688073394495,\n \"acc_norm_stderr\": 0.013802780227377342\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.6064814814814815,\n \"acc_stderr\": 0.03331747876370312,\n \"acc_norm\": 0.6064814814814815,\n \"acc_norm_stderr\": 0.03331747876370312\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8725490196078431,\n \"acc_stderr\": 0.02340553048084631,\n \"acc_norm\": 0.8725490196078431,\n \"acc_norm_stderr\": 0.02340553048084631\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8607594936708861,\n \"acc_stderr\": 0.022535526352692705,\n \"acc_norm\": 0.8607594936708861,\n \"acc_norm_stderr\": 0.022535526352692705\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.757847533632287,\n \"acc_stderr\": 0.028751392398694755,\n \"acc_norm\": 0.757847533632287,\n \"acc_norm_stderr\": 0.028751392398694755\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7862595419847328,\n \"acc_stderr\": 0.0359546161177469,\n \"acc_norm\": 0.7862595419847328,\n \"acc_norm_stderr\": 0.0359546161177469\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8760330578512396,\n \"acc_stderr\": 0.030083098716035202,\n \"acc_norm\": 0.8760330578512396,\n \"acc_norm_stderr\": 0.030083098716035202\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8148148148148148,\n \"acc_stderr\": 0.03755265865037182,\n \"acc_norm\": 0.8148148148148148,\n \"acc_norm_stderr\": 0.03755265865037182\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.803680981595092,\n \"acc_stderr\": 0.031207970394709218,\n \"acc_norm\": 0.803680981595092,\n \"acc_norm_stderr\": 0.031207970394709218\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.5982142857142857,\n \"acc_stderr\": 0.04653333146973647,\n \"acc_norm\": 0.5982142857142857,\n \"acc_norm_stderr\": 0.04653333146973647\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8446601941747572,\n \"acc_stderr\": 0.035865947385739734,\n \"acc_norm\": 0.8446601941747572,\n \"acc_norm_stderr\": 0.035865947385739734\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.9273504273504274,\n \"acc_stderr\": 0.01700436856813235,\n \"acc_norm\": 0.9273504273504274,\n \"acc_norm_stderr\": 0.01700436856813235\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.78,\n \"acc_stderr\": 0.04163331998932262,\n \"acc_norm\": 0.78,\n \"acc_norm_stderr\": 0.04163331998932262\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8812260536398467,\n \"acc_stderr\": 0.011569134791715655,\n 
\"acc_norm\": 0.8812260536398467,\n \"acc_norm_stderr\": 0.011569134791715655\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7687861271676301,\n \"acc_stderr\": 0.02269865716785571,\n \"acc_norm\": 0.7687861271676301,\n \"acc_norm_stderr\": 0.02269865716785571\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.464804469273743,\n \"acc_stderr\": 0.01668102093107665,\n \"acc_norm\": 0.464804469273743,\n \"acc_norm_stderr\": 0.01668102093107665\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.8202614379084967,\n \"acc_stderr\": 0.021986032182064148,\n \"acc_norm\": 0.8202614379084967,\n \"acc_norm_stderr\": 0.021986032182064148\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.8006430868167203,\n \"acc_stderr\": 0.022691033780549656,\n \"acc_norm\": 0.8006430868167203,\n \"acc_norm_stderr\": 0.022691033780549656\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.8240740740740741,\n \"acc_stderr\": 0.021185893615225153,\n \"acc_norm\": 0.8240740740740741,\n \"acc_norm_stderr\": 0.021185893615225153\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.5319148936170213,\n \"acc_stderr\": 0.02976667507587387,\n \"acc_norm\": 0.5319148936170213,\n \"acc_norm_stderr\": 0.02976667507587387\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.5378096479791395,\n \"acc_stderr\": 0.012733671880342506,\n \"acc_norm\": 0.5378096479791395,\n \"acc_norm_stderr\": 0.012733671880342506\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.8014705882352942,\n \"acc_stderr\": 0.024231013370541073,\n \"acc_norm\": 0.8014705882352942,\n \"acc_norm_stderr\": 0.024231013370541073\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.7532679738562091,\n \"acc_stderr\": 0.0174408203674025,\n \"acc_norm\": 0.7532679738562091,\n \"acc_norm_stderr\": 0.0174408203674025\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6909090909090909,\n \"acc_stderr\": 0.044262946482000985,\n \"acc_norm\": 0.6909090909090909,\n \"acc_norm_stderr\": 0.044262946482000985\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7510204081632653,\n \"acc_stderr\": 0.027682979522960234,\n \"acc_norm\": 0.7510204081632653,\n \"acc_norm_stderr\": 0.027682979522960234\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8756218905472637,\n \"acc_stderr\": 0.023335401790166327,\n \"acc_norm\": 0.8756218905472637,\n \"acc_norm_stderr\": 0.023335401790166327\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.89,\n \"acc_stderr\": 0.03144660377352203,\n \"acc_norm\": 0.89,\n \"acc_norm_stderr\": 0.03144660377352203\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5060240963855421,\n \"acc_stderr\": 0.03892212195333045,\n \"acc_norm\": 0.5060240963855421,\n \"acc_norm_stderr\": 0.03892212195333045\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8830409356725146,\n \"acc_stderr\": 0.02464806896136615,\n \"acc_norm\": 0.8830409356725146,\n \"acc_norm_stderr\": 0.02464806896136615\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.4418604651162791,\n \"mc1_stderr\": 0.017384767478986218,\n \"mc2\": 0.5881032370657441,\n \"mc2_stderr\": 0.015065851872175183\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8097868981846882,\n \"acc_stderr\": 0.01103033579861744\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.5708870356330553,\n \"acc_stderr\": 0.013633369425647236\n }\n}\n```", "repo_url": 
"https://huggingface.co/JaeyeonKang/CCK_Gony_v3.2", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_02T08_37_17.217721", "path": ["**/details_harness|arc:challenge|25_2024-02-02T08-37-17.217721.parquet"]}, {"split": "2024_02_02T09_00_50.830888", "path": ["**/details_harness|arc:challenge|25_2024-02-02T09-00-50.830888.parquet"]}, {"split": "2024_02_02T09_25_22.859036", "path": ["**/details_harness|arc:challenge|25_2024-02-02T09-25-22.859036.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-02T09-25-22.859036.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_02T08_37_17.217721", "path": ["**/details_harness|gsm8k|5_2024-02-02T08-37-17.217721.parquet"]}, {"split": "2024_02_02T09_00_50.830888", "path": ["**/details_harness|gsm8k|5_2024-02-02T09-00-50.830888.parquet"]}, {"split": "2024_02_02T09_25_22.859036", "path": ["**/details_harness|gsm8k|5_2024-02-02T09-25-22.859036.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-02T09-25-22.859036.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_02T08_37_17.217721", "path": ["**/details_harness|hellaswag|10_2024-02-02T08-37-17.217721.parquet"]}, {"split": "2024_02_02T09_00_50.830888", "path": ["**/details_harness|hellaswag|10_2024-02-02T09-00-50.830888.parquet"]}, {"split": "2024_02_02T09_25_22.859036", "path": ["**/details_harness|hellaswag|10_2024-02-02T09-25-22.859036.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-02T09-25-22.859036.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_02T08_37_17.217721", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T08-37-17.217721.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T08-37-17.217721.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T08-37-17.217721.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T08-37-17.217721.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T08-37-17.217721.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T08-37-17.217721.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T08-37-17.217721.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T08-37-17.217721.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T08-37-17.217721.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T08-37-17.217721.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T08-37-17.217721.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T08-37-17.217721.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T08-37-17.217721.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T08-37-17.217721.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T08-37-17.217721.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T08-37-17.217721.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T08-37-17.217721.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T08-37-17.217721.parquet", 
"**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T08-37-17.217721.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T08-37-17.217721.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T08-37-17.217721.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T08-37-17.217721.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T08-37-17.217721.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T08-37-17.217721.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T08-37-17.217721.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T08-37-17.217721.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T08-37-17.217721.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T08-37-17.217721.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T08-37-17.217721.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T08-37-17.217721.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T08-37-17.217721.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T08-37-17.217721.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T08-37-17.217721.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T08-37-17.217721.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T08-37-17.217721.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T08-37-17.217721.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T08-37-17.217721.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T08-37-17.217721.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T08-37-17.217721.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T08-37-17.217721.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T08-37-17.217721.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T08-37-17.217721.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T08-37-17.217721.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T08-37-17.217721.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T08-37-17.217721.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-02T08-37-17.217721.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-02T08-37-17.217721.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T08-37-17.217721.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T08-37-17.217721.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T08-37-17.217721.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T08-37-17.217721.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T08-37-17.217721.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T08-37-17.217721.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T08-37-17.217721.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T08-37-17.217721.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T08-37-17.217721.parquet", 
"**/details_harness|hendrycksTest-world_religions|5_2024-02-02T08-37-17.217721.parquet"]}, {"split": "2024_02_02T09_00_50.830888", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T09-00-50.830888.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T09-00-50.830888.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T09-00-50.830888.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T09-00-50.830888.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T09-00-50.830888.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T09-00-50.830888.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T09-00-50.830888.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T09-00-50.830888.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T09-00-50.830888.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T09-00-50.830888.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T09-00-50.830888.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T09-00-50.830888.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T09-00-50.830888.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T09-00-50.830888.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T09-00-50.830888.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T09-00-50.830888.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T09-00-50.830888.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T09-00-50.830888.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T09-00-50.830888.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T09-00-50.830888.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T09-00-50.830888.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T09-00-50.830888.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T09-00-50.830888.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T09-00-50.830888.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T09-00-50.830888.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T09-00-50.830888.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T09-00-50.830888.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T09-00-50.830888.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T09-00-50.830888.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T09-00-50.830888.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T09-00-50.830888.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T09-00-50.830888.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T09-00-50.830888.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T09-00-50.830888.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T09-00-50.830888.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T09-00-50.830888.parquet", 
"**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T09-00-50.830888.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T09-00-50.830888.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T09-00-50.830888.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T09-00-50.830888.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T09-00-50.830888.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T09-00-50.830888.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T09-00-50.830888.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T09-00-50.830888.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T09-00-50.830888.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-02T09-00-50.830888.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-02T09-00-50.830888.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T09-00-50.830888.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T09-00-50.830888.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T09-00-50.830888.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T09-00-50.830888.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T09-00-50.830888.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T09-00-50.830888.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T09-00-50.830888.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T09-00-50.830888.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T09-00-50.830888.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T09-00-50.830888.parquet"]}, {"split": "2024_02_02T09_25_22.859036", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T09-25-22.859036.parquet", 
"**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T09-25-22.859036.parquet", 
"**/details_harness|hendrycksTest-world_religions|5_2024-02-02T09-25-22.859036.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T09-25-22.859036.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T09-25-22.859036.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T09-25-22.859036.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_02T08_37_17.217721", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T08-37-17.217721.parquet"]}, {"split": "2024_02_02T09_00_50.830888", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T09-00-50.830888.parquet"]}, {"split": "2024_02_02T09_25_22.859036", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T09-25-22.859036.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T09-25-22.859036.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_02T08_37_17.217721", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T08-37-17.217721.parquet"]}, {"split": "2024_02_02T09_00_50.830888", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T09-00-50.830888.parquet"]}, {"split": "2024_02_02T09_25_22.859036", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T09-25-22.859036.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T09-25-22.859036.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_02T08_37_17.217721", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T08-37-17.217721.parquet"]}, {"split": "2024_02_02T09_00_50.830888", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T09-00-50.830888.parquet"]}, {"split": "2024_02_02T09_25_22.859036", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T09-25-22.859036.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T09-25-22.859036.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_02T08_37_17.217721", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T08-37-17.217721.parquet"]}, {"split": "2024_02_02T09_00_50.830888", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T09-00-50.830888.parquet"]}, {"split": "2024_02_02T09_25_22.859036", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T09-25-22.859036.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T09-25-22.859036.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_02T08_37_17.217721", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T08-37-17.217721.parquet"]}, {"split": "2024_02_02T09_00_50.830888", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T09-00-50.830888.parquet"]}, {"split": "2024_02_02T09_25_22.859036", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T09-25-22.859036.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T09-25-22.859036.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_02T08_37_17.217721", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T08-37-17.217721.parquet"]}, {"split": "2024_02_02T09_00_50.830888", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T09-00-50.830888.parquet"]}, {"split": "2024_02_02T09_25_22.859036", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T09-25-22.859036.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T09-25-22.859036.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_02T08_37_17.217721", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T08-37-17.217721.parquet"]}, {"split": "2024_02_02T09_00_50.830888", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T09-00-50.830888.parquet"]}, {"split": "2024_02_02T09_25_22.859036", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T09-25-22.859036.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T09-25-22.859036.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_02_02T08_37_17.217721", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T08-37-17.217721.parquet"]}, {"split": "2024_02_02T09_00_50.830888", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T09-00-50.830888.parquet"]}, {"split": "2024_02_02T09_25_22.859036", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T09-25-22.859036.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T09-25-22.859036.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_02T08_37_17.217721", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T08-37-17.217721.parquet"]}, {"split": "2024_02_02T09_00_50.830888", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T09-00-50.830888.parquet"]}, {"split": 
"2024_02_02T09_25_22.859036", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T09-25-22.859036.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T09-25-22.859036.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_02T08_37_17.217721", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T08-37-17.217721.parquet"]}, {"split": "2024_02_02T09_00_50.830888", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T09-00-50.830888.parquet"]}, {"split": "2024_02_02T09_25_22.859036", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T09-25-22.859036.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T09-25-22.859036.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_02T08_37_17.217721", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T08-37-17.217721.parquet"]}, {"split": "2024_02_02T09_00_50.830888", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T09-00-50.830888.parquet"]}, {"split": "2024_02_02T09_25_22.859036", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T09-25-22.859036.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T09-25-22.859036.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_02T08_37_17.217721", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T08-37-17.217721.parquet"]}, {"split": "2024_02_02T09_00_50.830888", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T09-00-50.830888.parquet"]}, {"split": "2024_02_02T09_25_22.859036", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T09-25-22.859036.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T09-25-22.859036.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_02T08_37_17.217721", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T08-37-17.217721.parquet"]}, {"split": "2024_02_02T09_00_50.830888", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T09-00-50.830888.parquet"]}, {"split": "2024_02_02T09_25_22.859036", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T09-25-22.859036.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T09-25-22.859036.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_02T08_37_17.217721", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T08-37-17.217721.parquet"]}, {"split": "2024_02_02T09_00_50.830888", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T09-00-50.830888.parquet"]}, {"split": "2024_02_02T09_25_22.859036", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T09-25-22.859036.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T09-25-22.859036.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_02T08_37_17.217721", "path": 
["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T08-37-17.217721.parquet"]}, {"split": "2024_02_02T09_00_50.830888", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T09-00-50.830888.parquet"]}, {"split": "2024_02_02T09_25_22.859036", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T09-25-22.859036.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T09-25-22.859036.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_02T08_37_17.217721", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T08-37-17.217721.parquet"]}, {"split": "2024_02_02T09_00_50.830888", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T09-00-50.830888.parquet"]}, {"split": "2024_02_02T09_25_22.859036", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T09-25-22.859036.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T09-25-22.859036.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_02T08_37_17.217721", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T08-37-17.217721.parquet"]}, {"split": "2024_02_02T09_00_50.830888", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T09-00-50.830888.parquet"]}, {"split": "2024_02_02T09_25_22.859036", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T09-25-22.859036.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T09-25-22.859036.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_02T08_37_17.217721", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T08-37-17.217721.parquet"]}, {"split": "2024_02_02T09_00_50.830888", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T09-00-50.830888.parquet"]}, {"split": "2024_02_02T09_25_22.859036", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T09-25-22.859036.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T09-25-22.859036.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_02T08_37_17.217721", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T08-37-17.217721.parquet"]}, {"split": "2024_02_02T09_00_50.830888", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T09-00-50.830888.parquet"]}, {"split": "2024_02_02T09_25_22.859036", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T09-25-22.859036.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T09-25-22.859036.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_02T08_37_17.217721", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T08-37-17.217721.parquet"]}, {"split": "2024_02_02T09_00_50.830888", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T09-00-50.830888.parquet"]}, {"split": "2024_02_02T09_25_22.859036", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T09-25-22.859036.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T09-25-22.859036.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_02T08_37_17.217721", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T08-37-17.217721.parquet"]}, {"split": "2024_02_02T09_00_50.830888", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T09-00-50.830888.parquet"]}, {"split": "2024_02_02T09_25_22.859036", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T09-25-22.859036.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T09-25-22.859036.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_02T08_37_17.217721", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T08-37-17.217721.parquet"]}, {"split": "2024_02_02T09_00_50.830888", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T09-00-50.830888.parquet"]}, {"split": "2024_02_02T09_25_22.859036", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T09-25-22.859036.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T09-25-22.859036.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_02T08_37_17.217721", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T08-37-17.217721.parquet"]}, {"split": "2024_02_02T09_00_50.830888", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T09-00-50.830888.parquet"]}, {"split": "2024_02_02T09_25_22.859036", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T09-25-22.859036.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T09-25-22.859036.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_02T08_37_17.217721", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T08-37-17.217721.parquet"]}, {"split": "2024_02_02T09_00_50.830888", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T09-00-50.830888.parquet"]}, {"split": "2024_02_02T09_25_22.859036", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T09-25-22.859036.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T09-25-22.859036.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_02T08_37_17.217721", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T08-37-17.217721.parquet"]}, {"split": "2024_02_02T09_00_50.830888", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T09-00-50.830888.parquet"]}, {"split": "2024_02_02T09_25_22.859036", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T09-25-22.859036.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T09-25-22.859036.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_02T08_37_17.217721", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T08-37-17.217721.parquet"]}, {"split": "2024_02_02T09_00_50.830888", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T09-00-50.830888.parquet"]}, {"split": "2024_02_02T09_25_22.859036", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T09-25-22.859036.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T09-25-22.859036.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_02T08_37_17.217721", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T08-37-17.217721.parquet"]}, {"split": "2024_02_02T09_00_50.830888", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T09-00-50.830888.parquet"]}, {"split": "2024_02_02T09_25_22.859036", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T09-25-22.859036.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T09-25-22.859036.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_02T08_37_17.217721", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T08-37-17.217721.parquet"]}, {"split": "2024_02_02T09_00_50.830888", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T09-00-50.830888.parquet"]}, {"split": "2024_02_02T09_25_22.859036", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T09-25-22.859036.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T09-25-22.859036.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_02T08_37_17.217721", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T08-37-17.217721.parquet"]}, {"split": "2024_02_02T09_00_50.830888", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T09-00-50.830888.parquet"]}, {"split": "2024_02_02T09_25_22.859036", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T09-25-22.859036.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T09-25-22.859036.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_02T08_37_17.217721", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T08-37-17.217721.parquet"]}, {"split": "2024_02_02T09_00_50.830888", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T09-00-50.830888.parquet"]}, {"split": "2024_02_02T09_25_22.859036", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T09-25-22.859036.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T09-25-22.859036.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_02T08_37_17.217721", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T08-37-17.217721.parquet"]}, {"split": "2024_02_02T09_00_50.830888", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T09-00-50.830888.parquet"]}, {"split": "2024_02_02T09_25_22.859036", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T09-25-22.859036.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T09-25-22.859036.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_02T08_37_17.217721", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T08-37-17.217721.parquet"]}, {"split": "2024_02_02T09_00_50.830888", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T09-00-50.830888.parquet"]}, {"split": "2024_02_02T09_25_22.859036", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T09-25-22.859036.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T09-25-22.859036.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_02T08_37_17.217721", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T08-37-17.217721.parquet"]}, {"split": "2024_02_02T09_00_50.830888", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T09-00-50.830888.parquet"]}, {"split": "2024_02_02T09_25_22.859036", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T09-25-22.859036.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T09-25-22.859036.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_02T08_37_17.217721", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T08-37-17.217721.parquet"]}, {"split": "2024_02_02T09_00_50.830888", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T09-00-50.830888.parquet"]}, {"split": "2024_02_02T09_25_22.859036", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T09-25-22.859036.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T09-25-22.859036.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_02T08_37_17.217721", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T08-37-17.217721.parquet"]}, {"split": "2024_02_02T09_00_50.830888", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T09-00-50.830888.parquet"]}, {"split": "2024_02_02T09_25_22.859036", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T09-25-22.859036.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T09-25-22.859036.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_02T08_37_17.217721", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T08-37-17.217721.parquet"]}, {"split": "2024_02_02T09_00_50.830888", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T09-00-50.830888.parquet"]}, {"split": "2024_02_02T09_25_22.859036", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T09-25-22.859036.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T09-25-22.859036.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", 
"data_files": [{"split": "2024_02_02T08_37_17.217721", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T08-37-17.217721.parquet"]}, {"split": "2024_02_02T09_00_50.830888", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T09-00-50.830888.parquet"]}, {"split": "2024_02_02T09_25_22.859036", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T09-25-22.859036.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T09-25-22.859036.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_02_02T08_37_17.217721", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T08-37-17.217721.parquet"]}, {"split": "2024_02_02T09_00_50.830888", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T09-00-50.830888.parquet"]}, {"split": "2024_02_02T09_25_22.859036", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T09-25-22.859036.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T09-25-22.859036.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_02T08_37_17.217721", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T08-37-17.217721.parquet"]}, {"split": "2024_02_02T09_00_50.830888", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T09-00-50.830888.parquet"]}, {"split": "2024_02_02T09_25_22.859036", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T09-25-22.859036.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T09-25-22.859036.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_02T08_37_17.217721", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T08-37-17.217721.parquet"]}, {"split": "2024_02_02T09_00_50.830888", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T09-00-50.830888.parquet"]}, {"split": "2024_02_02T09_25_22.859036", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T09-25-22.859036.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T09-25-22.859036.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_02T08_37_17.217721", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T08-37-17.217721.parquet"]}, {"split": "2024_02_02T09_00_50.830888", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T09-00-50.830888.parquet"]}, {"split": "2024_02_02T09_25_22.859036", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T09-25-22.859036.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T09-25-22.859036.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_02T08_37_17.217721", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T08-37-17.217721.parquet"]}, {"split": "2024_02_02T09_00_50.830888", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T09-00-50.830888.parquet"]}, {"split": "2024_02_02T09_25_22.859036", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T09-25-22.859036.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T09-25-22.859036.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_02T08_37_17.217721", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T08-37-17.217721.parquet"]}, {"split": "2024_02_02T09_00_50.830888", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T09-00-50.830888.parquet"]}, {"split": "2024_02_02T09_25_22.859036", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T09-25-22.859036.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T09-25-22.859036.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_02T08_37_17.217721", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T08-37-17.217721.parquet"]}, {"split": "2024_02_02T09_00_50.830888", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T09-00-50.830888.parquet"]}, {"split": "2024_02_02T09_25_22.859036", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T09-25-22.859036.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T09-25-22.859036.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_02T08_37_17.217721", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T08-37-17.217721.parquet"]}, {"split": "2024_02_02T09_00_50.830888", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T09-00-50.830888.parquet"]}, {"split": "2024_02_02T09_25_22.859036", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T09-25-22.859036.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T09-25-22.859036.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_02T08_37_17.217721", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T08-37-17.217721.parquet"]}, {"split": "2024_02_02T09_00_50.830888", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T09-00-50.830888.parquet"]}, {"split": "2024_02_02T09_25_22.859036", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T09-25-22.859036.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T09-25-22.859036.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_02T08_37_17.217721", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T08-37-17.217721.parquet"]}, {"split": "2024_02_02T09_00_50.830888", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T09-00-50.830888.parquet"]}, {"split": "2024_02_02T09_25_22.859036", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T09-25-22.859036.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T09-25-22.859036.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_02T08_37_17.217721", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T08-37-17.217721.parquet"]}, {"split": "2024_02_02T09_00_50.830888", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T09-00-50.830888.parquet"]}, {"split": "2024_02_02T09_25_22.859036", "path": 
["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T09-25-22.859036.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T09-25-22.859036.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_02T08_37_17.217721", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T08-37-17.217721.parquet"]}, {"split": "2024_02_02T09_00_50.830888", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T09-00-50.830888.parquet"]}, {"split": "2024_02_02T09_25_22.859036", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T09-25-22.859036.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T09-25-22.859036.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_02T08_37_17.217721", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T08-37-17.217721.parquet"]}, {"split": "2024_02_02T09_00_50.830888", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T09-00-50.830888.parquet"]}, {"split": "2024_02_02T09_25_22.859036", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T09-25-22.859036.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T09-25-22.859036.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_02T08_37_17.217721", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T08-37-17.217721.parquet"]}, {"split": "2024_02_02T09_00_50.830888", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T09-00-50.830888.parquet"]}, {"split": "2024_02_02T09_25_22.859036", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T09-25-22.859036.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T09-25-22.859036.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_02T08_37_17.217721", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T08-37-17.217721.parquet"]}, {"split": "2024_02_02T09_00_50.830888", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T09-00-50.830888.parquet"]}, {"split": "2024_02_02T09_25_22.859036", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T09-25-22.859036.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T09-25-22.859036.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_02T08_37_17.217721", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T08-37-17.217721.parquet"]}, {"split": "2024_02_02T09_00_50.830888", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T09-00-50.830888.parquet"]}, {"split": "2024_02_02T09_25_22.859036", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T09-25-22.859036.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T09-25-22.859036.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_02T08_37_17.217721", "path": 
["**/details_harness|hendrycksTest-sociology|5_2024-02-02T08-37-17.217721.parquet"]}, {"split": "2024_02_02T09_00_50.830888", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T09-00-50.830888.parquet"]}, {"split": "2024_02_02T09_25_22.859036", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T09-25-22.859036.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T09-25-22.859036.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_02T08_37_17.217721", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T08-37-17.217721.parquet"]}, {"split": "2024_02_02T09_00_50.830888", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T09-00-50.830888.parquet"]}, {"split": "2024_02_02T09_25_22.859036", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T09-25-22.859036.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T09-25-22.859036.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_02T08_37_17.217721", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T08-37-17.217721.parquet"]}, {"split": "2024_02_02T09_00_50.830888", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T09-00-50.830888.parquet"]}, {"split": "2024_02_02T09_25_22.859036", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T09-25-22.859036.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T09-25-22.859036.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_02T08_37_17.217721", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T08-37-17.217721.parquet"]}, {"split": "2024_02_02T09_00_50.830888", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T09-00-50.830888.parquet"]}, {"split": "2024_02_02T09_25_22.859036", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T09-25-22.859036.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T09-25-22.859036.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_02T08_37_17.217721", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T08-37-17.217721.parquet"]}, {"split": "2024_02_02T09_00_50.830888", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T09-00-50.830888.parquet"]}, {"split": "2024_02_02T09_25_22.859036", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T09-25-22.859036.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T09-25-22.859036.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_02T08_37_17.217721", "path": ["**/details_harness|winogrande|5_2024-02-02T08-37-17.217721.parquet"]}, {"split": "2024_02_02T09_00_50.830888", "path": ["**/details_harness|winogrande|5_2024-02-02T09-00-50.830888.parquet"]}, {"split": "2024_02_02T09_25_22.859036", "path": ["**/details_harness|winogrande|5_2024-02-02T09-25-22.859036.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-02-02T09-25-22.859036.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_02_02T08_37_17.217721", "path": ["results_2024-02-02T08-37-17.217721.parquet"]}, {"split": "2024_02_02T09_00_50.830888", "path": 
["results_2024-02-02T09-00-50.830888.parquet"]}, {"split": "2024_02_02T09_25_22.859036", "path": ["results_2024-02-02T09-25-22.859036.parquet"]}, {"split": "latest", "path": ["results_2024-02-02T09-25-22.859036.parquet"]}]}]}
2024-02-02T09:27:43+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of JaeyeonKang/CCK_Gony_v3.2 Dataset automatically created during the evaluation run of model JaeyeonKang/CCK_Gony_v3.2 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-02-02T09:25:22.859036 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
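The loading snippet referenced just above ("you can for instance do the following:") was stripped when this card's text was flattened. Below is a minimal sketch of what it would look like, assuming the details repository follows the `open-llm-leaderboard/details_{org}__{model}` naming pattern used by the other evaluation cards in this document; the config and split names are taken from the metadata block above.

```python
from datasets import load_dataset

# Repository id inferred from the naming pattern of the sibling cards in this
# document; it is an assumption, not confirmed by this record.
data = load_dataset("open-llm-leaderboard/details_JaeyeonKang__CCK_Gony_v3.2",
                    "harness_winogrande_5",  # any config_name listed in the metadata above
                    split="latest")          # or a timestamped split, e.g. "2024_02_02T09_25_22.859036"
```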
[ "# Dataset Card for Evaluation run of JaeyeonKang/CCK_Gony_v3.2\n\n\n\nDataset automatically created during the evaluation run of model JaeyeonKang/CCK_Gony_v3.2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T09:25:22.859036(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of JaeyeonKang/CCK_Gony_v3.2\n\n\n\nDataset automatically created during the evaluation run of model JaeyeonKang/CCK_Gony_v3.2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T09:25:22.859036(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
28267d3cd711531f91360242f6f078181003a27c
### Licensing Information

The dataset is released under the terms of [ODC-BY](https://opendatacommons.org/licenses/by/1-0/). By using this, you are also bound to the respective Terms of Use and License of the original source.

### Citation Information

Schwenk et al, CCMatrix: Mining Billions of High-Quality Parallel Sentences on the Web. ACL https://aclanthology.org/2021.acl-long.507/
Heffernan et al, Bitext Mining Using Distilled Sentence Representations for Low-Resource Languages. Arxiv https://arxiv.org/abs/2205.12654, 2022.<br>
NLLB Team et al, No Language Left Behind: Scaling Human-Centered Machine Translation, Arxiv https://arxiv.org/abs/2207.04672, 2022.

### Contributions

We thank the NLLB Meta AI team for open-sourcing the metadata and instructions on how to use it, with special thanks to Bapi Akula, Pierre Andrews, Onur Çelebi, Sergey Edunov, Kenneth Heafield, Philipp Koehn, Alex Mourachko, Safiyyah Saleem, Holger Schwenk, and Guillaume Wenzek. We also thank the AllenNLP team at AI2 for hosting and releasing this data, including Akshita Bhagia (for engineering efforts to host the data, and create the huggingface dataset), and Jesse Dodge (for organizing the connection).
NLPC-UOM/nllb-top25k-enta-cleaned
[ "task_categories:translation", "size_categories:10K<n<100K", "language:en", "language:ta", "license:odc-by", "arxiv:2205.12654", "arxiv:2207.04672", "region:us" ]
2024-02-02T08:44:24+00:00
{"language": ["en", "ta"], "license": "odc-by", "size_categories": ["10K<n<100K"], "task_categories": ["translation"]}
2024-02-15T06:51:19+00:00
[ "2205.12654", "2207.04672" ]
[ "en", "ta" ]
TAGS #task_categories-translation #size_categories-10K<n<100K #language-English #language-Tamil #license-odc-by #arxiv-2205.12654 #arxiv-2207.04672 #region-us
### Licensing Information The dataset is released under the terms of ODC-BY. By using this, you are also bound to the respective Terms of Use and License of the original source. Schwenk et al, CCMatrix: Mining Billions of High-Quality Parallel Sentences on the Web. ACL URL Heffernan et al, Bitext Mining Using Distilled Sentence Representations for Low-Resource Languages. Arxiv URL 2022.<br> NLLB Team et al, No Language Left Behind: Scaling Human-Centered Machine Translation, Arxiv URL 2022. ### Contributions We thank the NLLB Meta AI team for open-sourcing the metadata and instructions on how to use it, with special thanks to Bapi Akula, Pierre Andrews, Onur Çelebi, Sergey Edunov, Kenneth Heafield, Philipp Koehn, Alex Mourachko, Safiyyah Saleem, Holger Schwenk, and Guillaume Wenzek. We also thank the AllenNLP team at AI2 for hosting and releasing this data, including Akshita Bhagia (for engineering efforts to host the data, and create the huggingface dataset), and Jesse Dodge (for organizing the connection).
[ "### Licensing Information\n\nThe dataset is released under the terms of ODC-BY. By using this, you are also bound to the respective Terms of Use and License of the original source.\n\n\n\n\nSchwenk et al, CCMatrix: Mining Billions of High-Quality Parallel Sentences on the Web. ACL URL\nHefferman et al, Bitext Mining Using Distilled Sentence Representations for Low-Resource Languages. Arxiv URL 2022.<br>\nNLLB Team et al, No Language Left Behind: Scaling Human-Centered Machine Translation, Arxiv URL 2022.", "### Contributions\n\nWe thank the NLLB Meta AI team for open sourcing the meta data and instructions on how to use it with special thanks to Bapi Akula, Pierre Andrews, Onur Çelebi, Sergey Edunov, Kenneth Heafield, Philipp Koehn, Alex Mourachko, Safiyyah Saleem, Holger Schwenk, and Guillaume Wenzek. We also thank the AllenNLP team at AI2 for hosting and releasing this data, including Akshita Bhagia (for engineering efforts to host the data, and create the huggingface dataset), and Jesse Dodge (for organizing the connection)." ]
[ "TAGS\n#task_categories-translation #size_categories-10K<n<100K #language-English #language-Tamil #license-odc-by #arxiv-2205.12654 #arxiv-2207.04672 #region-us \n", "### Licensing Information\n\nThe dataset is released under the terms of ODC-BY. By using this, you are also bound to the respective Terms of Use and License of the original source.\n\n\n\n\nSchwenk et al, CCMatrix: Mining Billions of High-Quality Parallel Sentences on the Web. ACL URL\nHefferman et al, Bitext Mining Using Distilled Sentence Representations for Low-Resource Languages. Arxiv URL 2022.<br>\nNLLB Team et al, No Language Left Behind: Scaling Human-Centered Machine Translation, Arxiv URL 2022.", "### Contributions\n\nWe thank the NLLB Meta AI team for open sourcing the meta data and instructions on how to use it with special thanks to Bapi Akula, Pierre Andrews, Onur Çelebi, Sergey Edunov, Kenneth Heafield, Philipp Koehn, Alex Mourachko, Safiyyah Saleem, Holger Schwenk, and Guillaume Wenzek. We also thank the AllenNLP team at AI2 for hosting and releasing this data, including Akshita Bhagia (for engineering efforts to host the data, and create the huggingface dataset), and Jesse Dodge (for organizing the connection)." ]
5efc69171776f8e143b1a6a17139b34f165666a3
# Dataset Card for Evaluation run of DreadPoor/NewtoccineLake-slerp-7B <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [DreadPoor/NewtoccineLake-slerp-7B](https://huggingface.co/DreadPoor/NewtoccineLake-slerp-7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_DreadPoor__NewtoccineLake-slerp-7B", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-02-02T08:47:01.875874](https://huggingface.co/datasets/open-llm-leaderboard/details_DreadPoor__NewtoccineLake-slerp-7B/blob/main/results_2024-02-02T08-47-01.875874.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6491571860381778, "acc_stderr": 0.03206686241411434, "acc_norm": 0.6505026667143076, "acc_norm_stderr": 0.03271587226996518, "mc1": 0.41982864137086906, "mc1_stderr": 0.01727703030177577, "mc2": 0.5995154555010791, "mc2_stderr": 0.015251277020194909 }, "harness|arc:challenge|25": { "acc": 0.6527303754266212, "acc_stderr": 0.01391303452962045, "acc_norm": 0.6868600682593856, "acc_norm_stderr": 0.013552671543623494 }, "harness|hellaswag|10": { "acc": 0.6796454889464251, "acc_stderr": 0.004656591678606763, "acc_norm": 0.859788886675961, "acc_norm_stderr": 0.0034649633793799386 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.37, "acc_stderr": 0.04852365870939099, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939099 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.5925925925925926, "acc_stderr": 0.04244633238353227, "acc_norm": 0.5925925925925926, "acc_norm_stderr": 0.04244633238353227 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6907894736842105, "acc_stderr": 0.037610708698674805, "acc_norm": 0.6907894736842105, "acc_norm_stderr": 0.037610708698674805 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.6, "acc_stderr": 0.04923659639173309, "acc_norm": 0.6, "acc_norm_stderr": 0.04923659639173309 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.720754716981132, "acc_stderr": 0.027611163402399715, "acc_norm": 0.720754716981132, "acc_norm_stderr": 0.027611163402399715 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7708333333333334, "acc_stderr": 0.03514697467862388, "acc_norm": 0.7708333333333334, "acc_norm_stderr": 0.03514697467862388 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.49, "acc_stderr": 0.05024183937956911, "acc_norm": 0.49, "acc_norm_stderr": 0.05024183937956911 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.55, "acc_stderr": 0.05, "acc_norm": 0.55, "acc_norm_stderr": 0.05 }, 
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6820809248554913, "acc_stderr": 0.0355068398916558, "acc_norm": 0.6820809248554913, "acc_norm_stderr": 0.0355068398916558 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.39215686274509803, "acc_stderr": 0.04858083574266345, "acc_norm": 0.39215686274509803, "acc_norm_stderr": 0.04858083574266345 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.76, "acc_stderr": 0.042923469599092816, "acc_norm": 0.76, "acc_norm_stderr": 0.042923469599092816 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5872340425531914, "acc_stderr": 0.03218471141400351, "acc_norm": 0.5872340425531914, "acc_norm_stderr": 0.03218471141400351 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.47368421052631576, "acc_stderr": 0.046970851366478626, "acc_norm": 0.47368421052631576, "acc_norm_stderr": 0.046970851366478626 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.593103448275862, "acc_stderr": 0.04093793981266236, "acc_norm": 0.593103448275862, "acc_norm_stderr": 0.04093793981266236 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.3968253968253968, "acc_stderr": 0.025197101074246487, "acc_norm": 0.3968253968253968, "acc_norm_stderr": 0.025197101074246487 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.47619047619047616, "acc_stderr": 0.04467062628403273, "acc_norm": 0.47619047619047616, "acc_norm_stderr": 0.04467062628403273 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.36, "acc_stderr": 0.048241815132442176, "acc_norm": 0.36, "acc_norm_stderr": 0.048241815132442176 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7709677419354839, "acc_stderr": 0.023904914311782648, "acc_norm": 0.7709677419354839, "acc_norm_stderr": 0.023904914311782648 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.4975369458128079, "acc_stderr": 0.03517945038691063, "acc_norm": 0.4975369458128079, "acc_norm_stderr": 0.03517945038691063 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.69, "acc_stderr": 0.04648231987117316, "acc_norm": 0.69, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.793939393939394, "acc_stderr": 0.03158415324047711, "acc_norm": 0.793939393939394, "acc_norm_stderr": 0.03158415324047711 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.803030303030303, "acc_stderr": 0.028335609732463362, "acc_norm": 0.803030303030303, "acc_norm_stderr": 0.028335609732463362 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8911917098445595, "acc_stderr": 0.02247325333276875, "acc_norm": 0.8911917098445595, "acc_norm_stderr": 0.02247325333276875 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6487179487179487, "acc_stderr": 0.024203665177902803, "acc_norm": 0.6487179487179487, "acc_norm_stderr": 0.024203665177902803 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.35555555555555557, "acc_stderr": 0.02918571494985741, "acc_norm": 0.35555555555555557, "acc_norm_stderr": 0.02918571494985741 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6890756302521008, "acc_stderr": 0.03006676158297793, "acc_norm": 0.6890756302521008, "acc_norm_stderr": 0.03006676158297793 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3443708609271523, "acc_stderr": 
0.038796870240733264, "acc_norm": 0.3443708609271523, "acc_norm_stderr": 0.038796870240733264 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8422018348623853, "acc_stderr": 0.015630022970092444, "acc_norm": 0.8422018348623853, "acc_norm_stderr": 0.015630022970092444 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4861111111111111, "acc_stderr": 0.03408655867977748, "acc_norm": 0.4861111111111111, "acc_norm_stderr": 0.03408655867977748 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8480392156862745, "acc_stderr": 0.025195658428931792, "acc_norm": 0.8480392156862745, "acc_norm_stderr": 0.025195658428931792 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7974683544303798, "acc_stderr": 0.026160568246601446, "acc_norm": 0.7974683544303798, "acc_norm_stderr": 0.026160568246601446 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.7040358744394619, "acc_stderr": 0.0306365913486998, "acc_norm": 0.7040358744394619, "acc_norm_stderr": 0.0306365913486998 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7862595419847328, "acc_stderr": 0.0359546161177469, "acc_norm": 0.7862595419847328, "acc_norm_stderr": 0.0359546161177469 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7851239669421488, "acc_stderr": 0.037494924487096966, "acc_norm": 0.7851239669421488, "acc_norm_stderr": 0.037494924487096966 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7407407407407407, "acc_stderr": 0.04236511258094633, "acc_norm": 0.7407407407407407, "acc_norm_stderr": 0.04236511258094633 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7607361963190185, "acc_stderr": 0.033519538795212696, "acc_norm": 0.7607361963190185, "acc_norm_stderr": 0.033519538795212696 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4375, "acc_stderr": 0.04708567521880525, "acc_norm": 0.4375, "acc_norm_stderr": 0.04708567521880525 }, "harness|hendrycksTest-management|5": { "acc": 0.8155339805825242, "acc_stderr": 0.03840423627288276, "acc_norm": 0.8155339805825242, "acc_norm_stderr": 0.03840423627288276 }, "harness|hendrycksTest-marketing|5": { "acc": 0.9017094017094017, "acc_stderr": 0.019503444900757567, "acc_norm": 0.9017094017094017, "acc_norm_stderr": 0.019503444900757567 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.72, "acc_stderr": 0.04512608598542128, "acc_norm": 0.72, "acc_norm_stderr": 0.04512608598542128 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8250319284802043, "acc_stderr": 0.013586619219903335, "acc_norm": 0.8250319284802043, "acc_norm_stderr": 0.013586619219903335 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7485549132947977, "acc_stderr": 0.023357365785874037, "acc_norm": 0.7485549132947977, "acc_norm_stderr": 0.023357365785874037 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4201117318435754, "acc_stderr": 0.016507671073256402, "acc_norm": 0.4201117318435754, "acc_norm_stderr": 0.016507671073256402 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7189542483660131, "acc_stderr": 0.02573885479781874, "acc_norm": 0.7189542483660131, "acc_norm_stderr": 0.02573885479781874 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7331189710610932, "acc_stderr": 0.02512263760881666, "acc_norm": 0.7331189710610932, "acc_norm_stderr": 0.02512263760881666 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7283950617283951, "acc_stderr": 0.024748624490537368, "acc_norm": 0.7283950617283951, "acc_norm_stderr": 0.024748624490537368 }, "harness|hendrycksTest-professional_accounting|5": { 
"acc": 0.45390070921985815, "acc_stderr": 0.029700453247291463, "acc_norm": 0.45390070921985815, "acc_norm_stderr": 0.029700453247291463 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.47783572359843546, "acc_stderr": 0.012757683047716175, "acc_norm": 0.47783572359843546, "acc_norm_stderr": 0.012757683047716175 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6764705882352942, "acc_stderr": 0.028418208619406755, "acc_norm": 0.6764705882352942, "acc_norm_stderr": 0.028418208619406755 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6683006535947712, "acc_stderr": 0.019047485239360378, "acc_norm": 0.6683006535947712, "acc_norm_stderr": 0.019047485239360378 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6636363636363637, "acc_stderr": 0.04525393596302506, "acc_norm": 0.6636363636363637, "acc_norm_stderr": 0.04525393596302506 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.726530612244898, "acc_stderr": 0.028535560337128445, "acc_norm": 0.726530612244898, "acc_norm_stderr": 0.028535560337128445 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8557213930348259, "acc_stderr": 0.024845753212306046, "acc_norm": 0.8557213930348259, "acc_norm_stderr": 0.024845753212306046 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.86, "acc_stderr": 0.03487350880197768, "acc_norm": 0.86, "acc_norm_stderr": 0.03487350880197768 }, "harness|hendrycksTest-virology|5": { "acc": 0.5240963855421686, "acc_stderr": 0.03887971849597264, "acc_norm": 0.5240963855421686, "acc_norm_stderr": 0.03887971849597264 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8362573099415205, "acc_stderr": 0.028380919596145866, "acc_norm": 0.8362573099415205, "acc_norm_stderr": 0.028380919596145866 }, "harness|truthfulqa:mc|0": { "mc1": 0.41982864137086906, "mc1_stderr": 0.01727703030177577, "mc2": 0.5995154555010791, "mc2_stderr": 0.015251277020194909 }, "harness|winogrande|5": { "acc": 0.8153117600631413, "acc_stderr": 0.010905978112156876 }, "harness|gsm8k|5": { "acc": 0.6178923426838514, "acc_stderr": 0.013384173935648492 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
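As a companion to the loading snippet above, here is a minimal sketch of reading the aggregated "results" configuration instead of a per-task one. The config name "results" and the "latest" split alias are documented above; the exact schema of the aggregated rows is an assumption to verify against the repository.

```python
from datasets import load_dataset

# Minimal sketch: the "results" config holds the aggregated metrics of every
# run, and the "latest" split alias always resolves to the newest evaluation.
results = load_dataset(
    "open-llm-leaderboard/details_DreadPoor__NewtoccineLake-slerp-7B",
    "results",
    split="latest",
)

# Assumption: one row per run; inspect the features to confirm the layout.
print(results.features)
print(results[0])
```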
open-llm-leaderboard/details_DreadPoor__NewtoccineLake-slerp-7B
[ "region:us" ]
2024-02-02T08:49:18+00:00
{"pretty_name": "Evaluation run of DreadPoor/NewtoccineLake-slerp-7B", "dataset_summary": "Dataset automatically created during the evaluation run of model [DreadPoor/NewtoccineLake-slerp-7B](https://huggingface.co/DreadPoor/NewtoccineLake-slerp-7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_DreadPoor__NewtoccineLake-slerp-7B\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-02T08:47:01.875874](https://huggingface.co/datasets/open-llm-leaderboard/details_DreadPoor__NewtoccineLake-slerp-7B/blob/main/results_2024-02-02T08-47-01.875874.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6491571860381778,\n \"acc_stderr\": 0.03206686241411434,\n \"acc_norm\": 0.6505026667143076,\n \"acc_norm_stderr\": 0.03271587226996518,\n \"mc1\": 0.41982864137086906,\n \"mc1_stderr\": 0.01727703030177577,\n \"mc2\": 0.5995154555010791,\n \"mc2_stderr\": 0.015251277020194909\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6527303754266212,\n \"acc_stderr\": 0.01391303452962045,\n \"acc_norm\": 0.6868600682593856,\n \"acc_norm_stderr\": 0.013552671543623494\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6796454889464251,\n \"acc_stderr\": 0.004656591678606763,\n \"acc_norm\": 0.859788886675961,\n \"acc_norm_stderr\": 0.0034649633793799386\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.37,\n \"acc_stderr\": 0.04852365870939099,\n \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.04852365870939099\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5925925925925926,\n \"acc_stderr\": 0.04244633238353227,\n \"acc_norm\": 0.5925925925925926,\n \"acc_norm_stderr\": 0.04244633238353227\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6907894736842105,\n \"acc_stderr\": 0.037610708698674805,\n \"acc_norm\": 0.6907894736842105,\n \"acc_norm_stderr\": 0.037610708698674805\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.6,\n \"acc_stderr\": 0.04923659639173309,\n \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.04923659639173309\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.720754716981132,\n \"acc_stderr\": 0.027611163402399715,\n \"acc_norm\": 0.720754716981132,\n \"acc_norm_stderr\": 0.027611163402399715\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7708333333333334,\n \"acc_stderr\": 0.03514697467862388,\n \"acc_norm\": 0.7708333333333334,\n \"acc_norm_stderr\": 0.03514697467862388\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 
0.49,\n \"acc_stderr\": 0.05024183937956911,\n \"acc_norm\": 0.49,\n \"acc_norm_stderr\": 0.05024183937956911\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.55,\n \"acc_stderr\": 0.05,\n \"acc_norm\": 0.55,\n \"acc_norm_stderr\": 0.05\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6820809248554913,\n \"acc_stderr\": 0.0355068398916558,\n \"acc_norm\": 0.6820809248554913,\n \"acc_norm_stderr\": 0.0355068398916558\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.39215686274509803,\n \"acc_stderr\": 0.04858083574266345,\n \"acc_norm\": 0.39215686274509803,\n \"acc_norm_stderr\": 0.04858083574266345\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.76,\n \"acc_stderr\": 0.042923469599092816,\n \"acc_norm\": 0.76,\n \"acc_norm_stderr\": 0.042923469599092816\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5872340425531914,\n \"acc_stderr\": 0.03218471141400351,\n \"acc_norm\": 0.5872340425531914,\n \"acc_norm_stderr\": 0.03218471141400351\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.47368421052631576,\n \"acc_stderr\": 0.046970851366478626,\n \"acc_norm\": 0.47368421052631576,\n \"acc_norm_stderr\": 0.046970851366478626\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.593103448275862,\n \"acc_stderr\": 0.04093793981266236,\n \"acc_norm\": 0.593103448275862,\n \"acc_norm_stderr\": 0.04093793981266236\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.3968253968253968,\n \"acc_stderr\": 0.025197101074246487,\n \"acc_norm\": 0.3968253968253968,\n \"acc_norm_stderr\": 0.025197101074246487\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.47619047619047616,\n \"acc_stderr\": 0.04467062628403273,\n \"acc_norm\": 0.47619047619047616,\n \"acc_norm_stderr\": 0.04467062628403273\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.36,\n \"acc_stderr\": 0.048241815132442176,\n \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.048241815132442176\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7709677419354839,\n \"acc_stderr\": 0.023904914311782648,\n \"acc_norm\": 0.7709677419354839,\n \"acc_norm_stderr\": 0.023904914311782648\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.4975369458128079,\n \"acc_stderr\": 0.03517945038691063,\n \"acc_norm\": 0.4975369458128079,\n \"acc_norm_stderr\": 0.03517945038691063\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.793939393939394,\n \"acc_stderr\": 0.03158415324047711,\n \"acc_norm\": 0.793939393939394,\n \"acc_norm_stderr\": 0.03158415324047711\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.803030303030303,\n \"acc_stderr\": 0.028335609732463362,\n \"acc_norm\": 0.803030303030303,\n \"acc_norm_stderr\": 0.028335609732463362\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8911917098445595,\n \"acc_stderr\": 0.02247325333276875,\n \"acc_norm\": 0.8911917098445595,\n \"acc_norm_stderr\": 0.02247325333276875\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n 
\"acc\": 0.6487179487179487,\n \"acc_stderr\": 0.024203665177902803,\n \"acc_norm\": 0.6487179487179487,\n \"acc_norm_stderr\": 0.024203665177902803\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.35555555555555557,\n \"acc_stderr\": 0.02918571494985741,\n \"acc_norm\": 0.35555555555555557,\n \"acc_norm_stderr\": 0.02918571494985741\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6890756302521008,\n \"acc_stderr\": 0.03006676158297793,\n \"acc_norm\": 0.6890756302521008,\n \"acc_norm_stderr\": 0.03006676158297793\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3443708609271523,\n \"acc_stderr\": 0.038796870240733264,\n \"acc_norm\": 0.3443708609271523,\n \"acc_norm_stderr\": 0.038796870240733264\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8422018348623853,\n \"acc_stderr\": 0.015630022970092444,\n \"acc_norm\": 0.8422018348623853,\n \"acc_norm_stderr\": 0.015630022970092444\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.4861111111111111,\n \"acc_stderr\": 0.03408655867977748,\n \"acc_norm\": 0.4861111111111111,\n \"acc_norm_stderr\": 0.03408655867977748\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8480392156862745,\n \"acc_stderr\": 0.025195658428931792,\n \"acc_norm\": 0.8480392156862745,\n \"acc_norm_stderr\": 0.025195658428931792\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7974683544303798,\n \"acc_stderr\": 0.026160568246601446,\n \"acc_norm\": 0.7974683544303798,\n \"acc_norm_stderr\": 0.026160568246601446\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.7040358744394619,\n \"acc_stderr\": 0.0306365913486998,\n \"acc_norm\": 0.7040358744394619,\n \"acc_norm_stderr\": 0.0306365913486998\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7862595419847328,\n \"acc_stderr\": 0.0359546161177469,\n \"acc_norm\": 0.7862595419847328,\n \"acc_norm_stderr\": 0.0359546161177469\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7851239669421488,\n \"acc_stderr\": 0.037494924487096966,\n \"acc_norm\": 0.7851239669421488,\n \"acc_norm_stderr\": 0.037494924487096966\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7407407407407407,\n \"acc_stderr\": 0.04236511258094633,\n \"acc_norm\": 0.7407407407407407,\n \"acc_norm_stderr\": 0.04236511258094633\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7607361963190185,\n \"acc_stderr\": 0.033519538795212696,\n \"acc_norm\": 0.7607361963190185,\n \"acc_norm_stderr\": 0.033519538795212696\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4375,\n \"acc_stderr\": 0.04708567521880525,\n \"acc_norm\": 0.4375,\n \"acc_norm_stderr\": 0.04708567521880525\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8155339805825242,\n \"acc_stderr\": 0.03840423627288276,\n \"acc_norm\": 0.8155339805825242,\n \"acc_norm_stderr\": 0.03840423627288276\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.9017094017094017,\n \"acc_stderr\": 0.019503444900757567,\n \"acc_norm\": 0.9017094017094017,\n \"acc_norm_stderr\": 0.019503444900757567\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.04512608598542128,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.04512608598542128\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8250319284802043,\n \"acc_stderr\": 0.013586619219903335,\n \"acc_norm\": 
0.8250319284802043,\n \"acc_norm_stderr\": 0.013586619219903335\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7485549132947977,\n \"acc_stderr\": 0.023357365785874037,\n \"acc_norm\": 0.7485549132947977,\n \"acc_norm_stderr\": 0.023357365785874037\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4201117318435754,\n \"acc_stderr\": 0.016507671073256402,\n \"acc_norm\": 0.4201117318435754,\n \"acc_norm_stderr\": 0.016507671073256402\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7189542483660131,\n \"acc_stderr\": 0.02573885479781874,\n \"acc_norm\": 0.7189542483660131,\n \"acc_norm_stderr\": 0.02573885479781874\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7331189710610932,\n \"acc_stderr\": 0.02512263760881666,\n \"acc_norm\": 0.7331189710610932,\n \"acc_norm_stderr\": 0.02512263760881666\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7283950617283951,\n \"acc_stderr\": 0.024748624490537368,\n \"acc_norm\": 0.7283950617283951,\n \"acc_norm_stderr\": 0.024748624490537368\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.45390070921985815,\n \"acc_stderr\": 0.029700453247291463,\n \"acc_norm\": 0.45390070921985815,\n \"acc_norm_stderr\": 0.029700453247291463\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.47783572359843546,\n \"acc_stderr\": 0.012757683047716175,\n \"acc_norm\": 0.47783572359843546,\n \"acc_norm_stderr\": 0.012757683047716175\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6764705882352942,\n \"acc_stderr\": 0.028418208619406755,\n \"acc_norm\": 0.6764705882352942,\n \"acc_norm_stderr\": 0.028418208619406755\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6683006535947712,\n \"acc_stderr\": 0.019047485239360378,\n \"acc_norm\": 0.6683006535947712,\n \"acc_norm_stderr\": 0.019047485239360378\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6636363636363637,\n \"acc_stderr\": 0.04525393596302506,\n \"acc_norm\": 0.6636363636363637,\n \"acc_norm_stderr\": 0.04525393596302506\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.726530612244898,\n \"acc_stderr\": 0.028535560337128445,\n \"acc_norm\": 0.726530612244898,\n \"acc_norm_stderr\": 0.028535560337128445\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8557213930348259,\n \"acc_stderr\": 0.024845753212306046,\n \"acc_norm\": 0.8557213930348259,\n \"acc_norm_stderr\": 0.024845753212306046\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.86,\n \"acc_stderr\": 0.03487350880197768,\n \"acc_norm\": 0.86,\n \"acc_norm_stderr\": 0.03487350880197768\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5240963855421686,\n \"acc_stderr\": 0.03887971849597264,\n \"acc_norm\": 0.5240963855421686,\n \"acc_norm_stderr\": 0.03887971849597264\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8362573099415205,\n \"acc_stderr\": 0.028380919596145866,\n \"acc_norm\": 0.8362573099415205,\n \"acc_norm_stderr\": 0.028380919596145866\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.41982864137086906,\n \"mc1_stderr\": 0.01727703030177577,\n \"mc2\": 0.5995154555010791,\n \"mc2_stderr\": 0.015251277020194909\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8153117600631413,\n \"acc_stderr\": 0.010905978112156876\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.6178923426838514,\n \"acc_stderr\": 0.013384173935648492\n }\n}\n```", "repo_url": 
"https://huggingface.co/DreadPoor/NewtoccineLake-slerp-7B", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_02T08_47_01.875874", "path": ["**/details_harness|arc:challenge|25_2024-02-02T08-47-01.875874.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-02T08-47-01.875874.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_02T08_47_01.875874", "path": ["**/details_harness|gsm8k|5_2024-02-02T08-47-01.875874.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-02T08-47-01.875874.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_02T08_47_01.875874", "path": ["**/details_harness|hellaswag|10_2024-02-02T08-47-01.875874.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-02T08-47-01.875874.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_02T08_47_01.875874", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T08-47-01.875874.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T08-47-01.875874.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T08-47-01.875874.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T08-47-01.875874.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T08-47-01.875874.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T08-47-01.875874.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_02T08_47_01.875874", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T08-47-01.875874.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T08-47-01.875874.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_02T08_47_01.875874", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T08-47-01.875874.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T08-47-01.875874.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_02T08_47_01.875874", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T08-47-01.875874.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T08-47-01.875874.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_02T08_47_01.875874", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T08-47-01.875874.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T08-47-01.875874.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_02T08_47_01.875874", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T08-47-01.875874.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T08-47-01.875874.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_02T08_47_01.875874", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T08-47-01.875874.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T08-47-01.875874.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_02T08_47_01.875874", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T08-47-01.875874.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T08-47-01.875874.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_02_02T08_47_01.875874", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T08-47-01.875874.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T08-47-01.875874.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_02T08_47_01.875874", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T08-47-01.875874.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T08-47-01.875874.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_02T08_47_01.875874", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T08-47-01.875874.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T08-47-01.875874.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_02T08_47_01.875874", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T08-47-01.875874.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T08-47-01.875874.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_02T08_47_01.875874", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T08-47-01.875874.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T08-47-01.875874.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_02T08_47_01.875874", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T08-47-01.875874.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T08-47-01.875874.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_02T08_47_01.875874", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T08-47-01.875874.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T08-47-01.875874.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_02T08_47_01.875874", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T08-47-01.875874.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T08-47-01.875874.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_02T08_47_01.875874", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T08-47-01.875874.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T08-47-01.875874.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_02T08_47_01.875874", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T08-47-01.875874.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T08-47-01.875874.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_02T08_47_01.875874", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T08-47-01.875874.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T08-47-01.875874.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_02T08_47_01.875874", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T08-47-01.875874.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T08-47-01.875874.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_02T08_47_01.875874", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T08-47-01.875874.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T08-47-01.875874.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_02T08_47_01.875874", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T08-47-01.875874.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T08-47-01.875874.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_02T08_47_01.875874", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T08-47-01.875874.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T08-47-01.875874.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_02T08_47_01.875874", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T08-47-01.875874.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T08-47-01.875874.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_02T08_47_01.875874", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T08-47-01.875874.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T08-47-01.875874.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_02T08_47_01.875874", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T08-47-01.875874.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T08-47-01.875874.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_02T08_47_01.875874", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T08-47-01.875874.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T08-47-01.875874.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_02T08_47_01.875874", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T08-47-01.875874.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T08-47-01.875874.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_02T08_47_01.875874", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T08-47-01.875874.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T08-47-01.875874.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_02T08_47_01.875874", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T08-47-01.875874.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T08-47-01.875874.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_02T08_47_01.875874", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T08-47-01.875874.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T08-47-01.875874.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_02T08_47_01.875874", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T08-47-01.875874.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T08-47-01.875874.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_02T08_47_01.875874", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T08-47-01.875874.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T08-47-01.875874.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_02T08_47_01.875874", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T08-47-01.875874.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T08-47-01.875874.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_02T08_47_01.875874", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T08-47-01.875874.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T08-47-01.875874.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_02T08_47_01.875874", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T08-47-01.875874.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T08-47-01.875874.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_02T08_47_01.875874", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T08-47-01.875874.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T08-47-01.875874.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_02T08_47_01.875874", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T08-47-01.875874.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T08-47-01.875874.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_02_02T08_47_01.875874", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T08-47-01.875874.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T08-47-01.875874.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_02T08_47_01.875874", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T08-47-01.875874.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T08-47-01.875874.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_02T08_47_01.875874", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T08-47-01.875874.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T08-47-01.875874.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_02T08_47_01.875874", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T08-47-01.875874.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T08-47-01.875874.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_02T08_47_01.875874", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T08-47-01.875874.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T08-47-01.875874.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_02T08_47_01.875874", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T08-47-01.875874.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T08-47-01.875874.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_02T08_47_01.875874", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T08-47-01.875874.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T08-47-01.875874.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_02T08_47_01.875874", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T08-47-01.875874.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T08-47-01.875874.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_02T08_47_01.875874", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T08-47-01.875874.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T08-47-01.875874.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_02T08_47_01.875874", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T08-47-01.875874.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T08-47-01.875874.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_02T08_47_01.875874", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T08-47-01.875874.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T08-47-01.875874.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_02T08_47_01.875874", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T08-47-01.875874.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T08-47-01.875874.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_02T08_47_01.875874", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T08-47-01.875874.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T08-47-01.875874.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_02T08_47_01.875874", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T08-47-01.875874.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T08-47-01.875874.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_02T08_47_01.875874", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T08-47-01.875874.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T08-47-01.875874.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_02T08_47_01.875874", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T08-47-01.875874.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T08-47-01.875874.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_02T08_47_01.875874", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T08-47-01.875874.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T08-47-01.875874.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_02T08_47_01.875874", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T08-47-01.875874.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T08-47-01.875874.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_02T08_47_01.875874", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T08-47-01.875874.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T08-47-01.875874.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_02T08_47_01.875874", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T08-47-01.875874.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T08-47-01.875874.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_02T08_47_01.875874", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T08-47-01.875874.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T08-47-01.875874.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_02T08_47_01.875874", "path": ["**/details_harness|winogrande|5_2024-02-02T08-47-01.875874.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-02-02T08-47-01.875874.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_02_02T08_47_01.875874", "path": ["results_2024-02-02T08-47-01.875874.parquet"]}, {"split": "latest", "path": ["results_2024-02-02T08-47-01.875874.parquet"]}]}]}
2024-02-02T08:49:44+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of DreadPoor/NewtoccineLake-slerp-7B Dataset automatically created during the evaluation run of model DreadPoor/NewtoccineLake-slerp-7B on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-02-02T08:47:01.875874 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
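A minimal sketch of the load call referenced above, mirroring the analogous snippets elsewhere in this dump (the repo id is an assumption following the leaderboard's `details_<org>__<model>` convention and is not stated in this record):

```python
from datasets import load_dataset

# Repo id inferred from the leaderboard's naming convention (an assumption);
# "train" always points at the latest results, per the card text above.
data = load_dataset(
    "open-llm-leaderboard/details_DreadPoor__NewtoccineLake-slerp-7B",
    "harness_winogrande_5",
    split="train",
)
```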
[ "# Dataset Card for Evaluation run of DreadPoor/NewtoccineLake-slerp-7B\n\n\n\nDataset automatically created during the evaluation run of model DreadPoor/NewtoccineLake-slerp-7B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T08:47:01.875874(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of DreadPoor/NewtoccineLake-slerp-7B\n\n\n\nDataset automatically created during the evaluation run of model DreadPoor/NewtoccineLake-slerp-7B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T08:47:01.875874(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
841f6d8b47e02c7c14fe610eb2d1027841c81150
language:
- bo
tags:
- audio
- automatic-speech-recognition
license: other
openpecha/tibetan_voice_v3
[ "region:us" ]
2024-02-02T08:52:38+00:00
{}
2024-02-02T09:58:29+00:00
[]
[]
TAGS #region-us
language: - bo tags: - audio - automatic-speech-recognition license: other
[]
[ "TAGS\n#region-us \n" ]
aed8001511ddf561beaacbd098e67e24c15f0d81
# Dataset Card for Evaluation run of Technoculture/Medmerge-tulu-70b <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [Technoculture/Medmerge-tulu-70b](https://huggingface.co/Technoculture/Medmerge-tulu-70b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_Technoculture__Medmerge-tulu-70b", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-02-02T08:57:05.202526](https://huggingface.co/datasets/open-llm-leaderboard/details_Technoculture__Medmerge-tulu-70b/blob/main/results_2024-02-02T08-57-05.202526.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6995004072032712, "acc_stderr": 0.030105137468387656, "acc_norm": 0.7034970253233567, "acc_norm_stderr": 0.030687587546575825, "mc1": 0.33659730722154224, "mc1_stderr": 0.01654241280949489, "mc2": 0.47893467153483676, "mc2_stderr": 0.01415354521385091 }, "harness|arc:challenge|25": { "acc": 0.6331058020477816, "acc_stderr": 0.014084133118104298, "acc_norm": 0.674061433447099, "acc_norm_stderr": 0.013697432466693247 }, "harness|hellaswag|10": { "acc": 0.6789484166500697, "acc_stderr": 0.004659263952756616, "acc_norm": 0.8746265684126668, "acc_norm_stderr": 0.0033046510372765517 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6666666666666666, "acc_stderr": 0.04072314811876837, "acc_norm": 0.6666666666666666, "acc_norm_stderr": 0.04072314811876837 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.8092105263157895, "acc_stderr": 0.031975658210325, "acc_norm": 0.8092105263157895, "acc_norm_stderr": 0.031975658210325 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.75, "acc_stderr": 0.04351941398892446, "acc_norm": 0.75, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.720754716981132, "acc_stderr": 0.027611163402399715, "acc_norm": 0.720754716981132, "acc_norm_stderr": 0.027611163402399715 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.8472222222222222, "acc_stderr": 0.030085743248565663, "acc_norm": 0.8472222222222222, "acc_norm_stderr": 0.030085743248565663 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.53, "acc_stderr": 0.050161355804659205, "acc_norm": 0.53, "acc_norm_stderr": 0.050161355804659205 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.61, "acc_stderr": 0.04902071300001974, "acc_norm": 0.61, "acc_norm_stderr": 
0.04902071300001974 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.38, "acc_stderr": 0.048783173121456316, "acc_norm": 0.38, "acc_norm_stderr": 0.048783173121456316 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6416184971098265, "acc_stderr": 0.03656343653353159, "acc_norm": 0.6416184971098265, "acc_norm_stderr": 0.03656343653353159 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.35294117647058826, "acc_stderr": 0.04755129616062947, "acc_norm": 0.35294117647058826, "acc_norm_stderr": 0.04755129616062947 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.78, "acc_stderr": 0.04163331998932262, "acc_norm": 0.78, "acc_norm_stderr": 0.04163331998932262 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.676595744680851, "acc_stderr": 0.030579442773610334, "acc_norm": 0.676595744680851, "acc_norm_stderr": 0.030579442773610334 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.4298245614035088, "acc_stderr": 0.046570472605949625, "acc_norm": 0.4298245614035088, "acc_norm_stderr": 0.046570472605949625 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.6620689655172414, "acc_stderr": 0.039417076320648906, "acc_norm": 0.6620689655172414, "acc_norm_stderr": 0.039417076320648906 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.46296296296296297, "acc_stderr": 0.025680564640056882, "acc_norm": 0.46296296296296297, "acc_norm_stderr": 0.025680564640056882 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.48412698412698413, "acc_stderr": 0.04469881854072606, "acc_norm": 0.48412698412698413, "acc_norm_stderr": 0.04469881854072606 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.5, "acc_stderr": 0.050251890762960605, "acc_norm": 0.5, "acc_norm_stderr": 0.050251890762960605 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.8193548387096774, "acc_stderr": 0.021886178567172534, "acc_norm": 0.8193548387096774, "acc_norm_stderr": 0.021886178567172534 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5270935960591133, "acc_stderr": 0.03512819077876106, "acc_norm": 0.5270935960591133, "acc_norm_stderr": 0.03512819077876106 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.76, "acc_stderr": 0.042923469599092816, "acc_norm": 0.76, "acc_norm_stderr": 0.042923469599092816 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.8242424242424242, "acc_stderr": 0.02972094300622445, "acc_norm": 0.8242424242424242, "acc_norm_stderr": 0.02972094300622445 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8787878787878788, "acc_stderr": 0.02325315795194209, "acc_norm": 0.8787878787878788, "acc_norm_stderr": 0.02325315795194209 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9430051813471503, "acc_stderr": 0.016731085293607555, "acc_norm": 0.9430051813471503, "acc_norm_stderr": 0.016731085293607555 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.7205128205128205, "acc_stderr": 0.022752388839776823, "acc_norm": 0.7205128205128205, "acc_norm_stderr": 0.022752388839776823 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.34074074074074073, "acc_stderr": 0.02889774874113114, "acc_norm": 0.34074074074074073, "acc_norm_stderr": 0.02889774874113114 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.7647058823529411, "acc_stderr": 0.02755361446786381, "acc_norm": 0.7647058823529411, "acc_norm_stderr": 0.02755361446786381 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 
0.423841059602649, "acc_stderr": 0.04034846678603397, "acc_norm": 0.423841059602649, "acc_norm_stderr": 0.04034846678603397 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8788990825688073, "acc_stderr": 0.013987618292389713, "acc_norm": 0.8788990825688073, "acc_norm_stderr": 0.013987618292389713 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.6111111111111112, "acc_stderr": 0.033247089118091176, "acc_norm": 0.6111111111111112, "acc_norm_stderr": 0.033247089118091176 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.9166666666666666, "acc_stderr": 0.019398452135813905, "acc_norm": 0.9166666666666666, "acc_norm_stderr": 0.019398452135813905 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8818565400843882, "acc_stderr": 0.021011052659878463, "acc_norm": 0.8818565400843882, "acc_norm_stderr": 0.021011052659878463 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.7937219730941704, "acc_stderr": 0.02715715047956382, "acc_norm": 0.7937219730941704, "acc_norm_stderr": 0.02715715047956382 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.8931297709923665, "acc_stderr": 0.027096548624883733, "acc_norm": 0.8931297709923665, "acc_norm_stderr": 0.027096548624883733 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8760330578512396, "acc_stderr": 0.03008309871603521, "acc_norm": 0.8760330578512396, "acc_norm_stderr": 0.03008309871603521 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8333333333333334, "acc_stderr": 0.036028141763926456, "acc_norm": 0.8333333333333334, "acc_norm_stderr": 0.036028141763926456 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.8159509202453987, "acc_stderr": 0.03044677768797173, "acc_norm": 0.8159509202453987, "acc_norm_stderr": 0.03044677768797173 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.5178571428571429, "acc_stderr": 0.047427623612430116, "acc_norm": 0.5178571428571429, "acc_norm_stderr": 0.047427623612430116 }, "harness|hendrycksTest-management|5": { "acc": 0.8155339805825242, "acc_stderr": 0.03840423627288276, "acc_norm": 0.8155339805825242, "acc_norm_stderr": 0.03840423627288276 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8974358974358975, "acc_stderr": 0.01987565502786746, "acc_norm": 0.8974358974358975, "acc_norm_stderr": 0.01987565502786746 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.76, "acc_stderr": 0.04292346959909282, "acc_norm": 0.76, "acc_norm_stderr": 0.04292346959909282 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8671775223499362, "acc_stderr": 0.012136303209884564, "acc_norm": 0.8671775223499362, "acc_norm_stderr": 0.012136303209884564 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7861271676300579, "acc_stderr": 0.022075709251757177, "acc_norm": 0.7861271676300579, "acc_norm_stderr": 0.022075709251757177 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4547486033519553, "acc_stderr": 0.016653875777524002, "acc_norm": 0.4547486033519553, "acc_norm_stderr": 0.016653875777524002 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7745098039215687, "acc_stderr": 0.023929155517351284, "acc_norm": 0.7745098039215687, "acc_norm_stderr": 0.023929155517351284 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7942122186495176, "acc_stderr": 0.022961339906764244, "acc_norm": 0.7942122186495176, "acc_norm_stderr": 0.022961339906764244 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.845679012345679, "acc_stderr": 0.02010083099985099, "acc_norm": 0.845679012345679, "acc_norm_stderr": 
0.02010083099985099 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.5602836879432624, "acc_stderr": 0.029609912075594116, "acc_norm": 0.5602836879432624, "acc_norm_stderr": 0.029609912075594116 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.5352020860495437, "acc_stderr": 0.012738547371303963, "acc_norm": 0.5352020860495437, "acc_norm_stderr": 0.012738547371303963 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.7573529411764706, "acc_stderr": 0.026040662474201257, "acc_norm": 0.7573529411764706, "acc_norm_stderr": 0.026040662474201257 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.7581699346405228, "acc_stderr": 0.017322789207784326, "acc_norm": 0.7581699346405228, "acc_norm_stderr": 0.017322789207784326 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.7090909090909091, "acc_stderr": 0.043502714429232425, "acc_norm": 0.7090909090909091, "acc_norm_stderr": 0.043502714429232425 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.8040816326530612, "acc_stderr": 0.025409301953225678, "acc_norm": 0.8040816326530612, "acc_norm_stderr": 0.025409301953225678 }, "harness|hendrycksTest-sociology|5": { "acc": 0.9054726368159204, "acc_stderr": 0.02068718695153409, "acc_norm": 0.9054726368159204, "acc_norm_stderr": 0.02068718695153409 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.92, "acc_stderr": 0.0272659924344291, "acc_norm": 0.92, "acc_norm_stderr": 0.0272659924344291 }, "harness|hendrycksTest-virology|5": { "acc": 0.5180722891566265, "acc_stderr": 0.038899512528272166, "acc_norm": 0.5180722891566265, "acc_norm_stderr": 0.038899512528272166 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8596491228070176, "acc_stderr": 0.0266405825391332, "acc_norm": 0.8596491228070176, "acc_norm_stderr": 0.0266405825391332 }, "harness|truthfulqa:mc|0": { "mc1": 0.33659730722154224, "mc1_stderr": 0.01654241280949489, "mc2": 0.47893467153483676, "mc2_stderr": 0.01415354521385091 }, "harness|winogrande|5": { "acc": 0.8342541436464088, "acc_stderr": 0.010450899545370625 }, "harness|gsm8k|5": { "acc": 0.5655799848369977, "acc_stderr": 0.013653507211411418 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
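A short usage sketch for the layout this card describes, using only what the card states (per-task configs plus a "results" config whose "latest" split tracks the newest run):

```python
from datasets import get_dataset_config_names, load_dataset

repo = "open-llm-leaderboard/details_Technoculture__Medmerge-tulu-70b"

# Enumerate the 63 per-task configs plus the aggregated "results" config.
print(get_dataset_config_names(repo))

# Load the aggregated metrics; "latest" always tracks the newest run.
results = load_dataset(repo, "results", split="latest")
print(results)
```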
open-llm-leaderboard/details_Technoculture__Medmerge-tulu-70b
[ "region:us" ]
2024-02-02T08:59:28+00:00
{"pretty_name": "Evaluation run of Technoculture/Medmerge-tulu-70b", "dataset_summary": "Dataset automatically created during the evaluation run of model [Technoculture/Medmerge-tulu-70b](https://huggingface.co/Technoculture/Medmerge-tulu-70b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Technoculture__Medmerge-tulu-70b\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-02T08:57:05.202526](https://huggingface.co/datasets/open-llm-leaderboard/details_Technoculture__Medmerge-tulu-70b/blob/main/results_2024-02-02T08-57-05.202526.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6995004072032712,\n \"acc_stderr\": 0.030105137468387656,\n \"acc_norm\": 0.7034970253233567,\n \"acc_norm_stderr\": 0.030687587546575825,\n \"mc1\": 0.33659730722154224,\n \"mc1_stderr\": 0.01654241280949489,\n \"mc2\": 0.47893467153483676,\n \"mc2_stderr\": 0.01415354521385091\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6331058020477816,\n \"acc_stderr\": 0.014084133118104298,\n \"acc_norm\": 0.674061433447099,\n \"acc_norm_stderr\": 0.013697432466693247\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6789484166500697,\n \"acc_stderr\": 0.004659263952756616,\n \"acc_norm\": 0.8746265684126668,\n \"acc_norm_stderr\": 0.0033046510372765517\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6666666666666666,\n \"acc_stderr\": 0.04072314811876837,\n \"acc_norm\": 0.6666666666666666,\n \"acc_norm_stderr\": 0.04072314811876837\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.8092105263157895,\n \"acc_stderr\": 0.031975658210325,\n \"acc_norm\": 0.8092105263157895,\n \"acc_norm_stderr\": 0.031975658210325\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.720754716981132,\n \"acc_stderr\": 0.027611163402399715,\n \"acc_norm\": 0.720754716981132,\n \"acc_norm_stderr\": 0.027611163402399715\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.8472222222222222,\n \"acc_stderr\": 0.030085743248565663,\n \"acc_norm\": 0.8472222222222222,\n \"acc_norm_stderr\": 0.030085743248565663\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.53,\n 
\"acc_stderr\": 0.050161355804659205,\n \"acc_norm\": 0.53,\n \"acc_norm_stderr\": 0.050161355804659205\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.61,\n \"acc_stderr\": 0.04902071300001974,\n \"acc_norm\": 0.61,\n \"acc_norm_stderr\": 0.04902071300001974\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.38,\n \"acc_stderr\": 0.048783173121456316,\n \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.048783173121456316\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6416184971098265,\n \"acc_stderr\": 0.03656343653353159,\n \"acc_norm\": 0.6416184971098265,\n \"acc_norm_stderr\": 0.03656343653353159\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.35294117647058826,\n \"acc_stderr\": 0.04755129616062947,\n \"acc_norm\": 0.35294117647058826,\n \"acc_norm_stderr\": 0.04755129616062947\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.78,\n \"acc_stderr\": 0.04163331998932262,\n \"acc_norm\": 0.78,\n \"acc_norm_stderr\": 0.04163331998932262\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.676595744680851,\n \"acc_stderr\": 0.030579442773610334,\n \"acc_norm\": 0.676595744680851,\n \"acc_norm_stderr\": 0.030579442773610334\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.4298245614035088,\n \"acc_stderr\": 0.046570472605949625,\n \"acc_norm\": 0.4298245614035088,\n \"acc_norm_stderr\": 0.046570472605949625\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.6620689655172414,\n \"acc_stderr\": 0.039417076320648906,\n \"acc_norm\": 0.6620689655172414,\n \"acc_norm_stderr\": 0.039417076320648906\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.46296296296296297,\n \"acc_stderr\": 0.025680564640056882,\n \"acc_norm\": 0.46296296296296297,\n \"acc_norm_stderr\": 0.025680564640056882\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.48412698412698413,\n \"acc_stderr\": 0.04469881854072606,\n \"acc_norm\": 0.48412698412698413,\n \"acc_norm_stderr\": 0.04469881854072606\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.050251890762960605,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.050251890762960605\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.8193548387096774,\n \"acc_stderr\": 0.021886178567172534,\n \"acc_norm\": 0.8193548387096774,\n \"acc_norm_stderr\": 0.021886178567172534\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5270935960591133,\n \"acc_stderr\": 0.03512819077876106,\n \"acc_norm\": 0.5270935960591133,\n \"acc_norm_stderr\": 0.03512819077876106\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.76,\n \"acc_stderr\": 0.042923469599092816,\n \"acc_norm\": 0.76,\n \"acc_norm_stderr\": 0.042923469599092816\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.8242424242424242,\n \"acc_stderr\": 0.02972094300622445,\n \"acc_norm\": 0.8242424242424242,\n \"acc_norm_stderr\": 0.02972094300622445\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8787878787878788,\n \"acc_stderr\": 0.02325315795194209,\n \"acc_norm\": 0.8787878787878788,\n \"acc_norm_stderr\": 0.02325315795194209\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9430051813471503,\n \"acc_stderr\": 0.016731085293607555,\n \"acc_norm\": 0.9430051813471503,\n \"acc_norm_stderr\": 0.016731085293607555\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.7205128205128205,\n \"acc_stderr\": 0.022752388839776823,\n \"acc_norm\": 0.7205128205128205,\n \"acc_norm_stderr\": 0.022752388839776823\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.34074074074074073,\n \"acc_stderr\": 0.02889774874113114,\n \"acc_norm\": 0.34074074074074073,\n \"acc_norm_stderr\": 0.02889774874113114\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.7647058823529411,\n \"acc_stderr\": 0.02755361446786381,\n \"acc_norm\": 0.7647058823529411,\n \"acc_norm_stderr\": 0.02755361446786381\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.423841059602649,\n \"acc_stderr\": 0.04034846678603397,\n \"acc_norm\": 0.423841059602649,\n \"acc_norm_stderr\": 0.04034846678603397\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8788990825688073,\n \"acc_stderr\": 0.013987618292389713,\n \"acc_norm\": 0.8788990825688073,\n \"acc_norm_stderr\": 0.013987618292389713\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.6111111111111112,\n \"acc_stderr\": 0.033247089118091176,\n \"acc_norm\": 0.6111111111111112,\n \"acc_norm_stderr\": 0.033247089118091176\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.9166666666666666,\n \"acc_stderr\": 0.019398452135813905,\n \"acc_norm\": 0.9166666666666666,\n \"acc_norm_stderr\": 0.019398452135813905\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8818565400843882,\n \"acc_stderr\": 0.021011052659878463,\n \"acc_norm\": 0.8818565400843882,\n \"acc_norm_stderr\": 0.021011052659878463\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.7937219730941704,\n \"acc_stderr\": 0.02715715047956382,\n \"acc_norm\": 0.7937219730941704,\n \"acc_norm_stderr\": 0.02715715047956382\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.8931297709923665,\n \"acc_stderr\": 0.027096548624883733,\n \"acc_norm\": 0.8931297709923665,\n \"acc_norm_stderr\": 0.027096548624883733\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8760330578512396,\n \"acc_stderr\": 0.03008309871603521,\n \"acc_norm\": 0.8760330578512396,\n \"acc_norm_stderr\": 0.03008309871603521\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8333333333333334,\n \"acc_stderr\": 0.036028141763926456,\n \"acc_norm\": 0.8333333333333334,\n \"acc_norm_stderr\": 0.036028141763926456\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.8159509202453987,\n \"acc_stderr\": 0.03044677768797173,\n \"acc_norm\": 0.8159509202453987,\n \"acc_norm_stderr\": 0.03044677768797173\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.5178571428571429,\n \"acc_stderr\": 0.047427623612430116,\n \"acc_norm\": 0.5178571428571429,\n \"acc_norm_stderr\": 0.047427623612430116\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8155339805825242,\n \"acc_stderr\": 0.03840423627288276,\n \"acc_norm\": 0.8155339805825242,\n \"acc_norm_stderr\": 0.03840423627288276\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8974358974358975,\n \"acc_stderr\": 0.01987565502786746,\n \"acc_norm\": 0.8974358974358975,\n \"acc_norm_stderr\": 0.01987565502786746\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.76,\n \"acc_stderr\": 0.04292346959909282,\n \"acc_norm\": 0.76,\n \"acc_norm_stderr\": 0.04292346959909282\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.8671775223499362,\n \"acc_stderr\": 0.012136303209884564,\n \"acc_norm\": 0.8671775223499362,\n \"acc_norm_stderr\": 0.012136303209884564\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7861271676300579,\n \"acc_stderr\": 0.022075709251757177,\n \"acc_norm\": 0.7861271676300579,\n \"acc_norm_stderr\": 0.022075709251757177\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4547486033519553,\n \"acc_stderr\": 0.016653875777524002,\n \"acc_norm\": 0.4547486033519553,\n \"acc_norm_stderr\": 0.016653875777524002\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7745098039215687,\n \"acc_stderr\": 0.023929155517351284,\n \"acc_norm\": 0.7745098039215687,\n \"acc_norm_stderr\": 0.023929155517351284\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7942122186495176,\n \"acc_stderr\": 0.022961339906764244,\n \"acc_norm\": 0.7942122186495176,\n \"acc_norm_stderr\": 0.022961339906764244\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.845679012345679,\n \"acc_stderr\": 0.02010083099985099,\n \"acc_norm\": 0.845679012345679,\n \"acc_norm_stderr\": 0.02010083099985099\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.5602836879432624,\n \"acc_stderr\": 0.029609912075594116,\n \"acc_norm\": 0.5602836879432624,\n \"acc_norm_stderr\": 0.029609912075594116\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.5352020860495437,\n \"acc_stderr\": 0.012738547371303963,\n \"acc_norm\": 0.5352020860495437,\n \"acc_norm_stderr\": 0.012738547371303963\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.7573529411764706,\n \"acc_stderr\": 0.026040662474201257,\n \"acc_norm\": 0.7573529411764706,\n \"acc_norm_stderr\": 0.026040662474201257\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.7581699346405228,\n \"acc_stderr\": 0.017322789207784326,\n \"acc_norm\": 0.7581699346405228,\n \"acc_norm_stderr\": 0.017322789207784326\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.7090909090909091,\n \"acc_stderr\": 0.043502714429232425,\n \"acc_norm\": 0.7090909090909091,\n \"acc_norm_stderr\": 0.043502714429232425\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.8040816326530612,\n \"acc_stderr\": 0.025409301953225678,\n \"acc_norm\": 0.8040816326530612,\n \"acc_norm_stderr\": 0.025409301953225678\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.9054726368159204,\n \"acc_stderr\": 0.02068718695153409,\n \"acc_norm\": 0.9054726368159204,\n \"acc_norm_stderr\": 0.02068718695153409\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.92,\n \"acc_stderr\": 0.0272659924344291,\n \"acc_norm\": 0.92,\n \"acc_norm_stderr\": 0.0272659924344291\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5180722891566265,\n \"acc_stderr\": 0.038899512528272166,\n \"acc_norm\": 0.5180722891566265,\n \"acc_norm_stderr\": 0.038899512528272166\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8596491228070176,\n \"acc_stderr\": 0.0266405825391332,\n \"acc_norm\": 0.8596491228070176,\n \"acc_norm_stderr\": 0.0266405825391332\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.33659730722154224,\n \"mc1_stderr\": 0.01654241280949489,\n \"mc2\": 0.47893467153483676,\n \"mc2_stderr\": 0.01415354521385091\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8342541436464088,\n \"acc_stderr\": 0.010450899545370625\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.5655799848369977,\n \"acc_stderr\": 
0.013653507211411418\n }\n}\n```", "repo_url": "https://huggingface.co/Technoculture/Medmerge-tulu-70b", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_02T08_57_05.202526", "path": ["**/details_harness|arc:challenge|25_2024-02-02T08-57-05.202526.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-02T08-57-05.202526.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_02T08_57_05.202526", "path": ["**/details_harness|gsm8k|5_2024-02-02T08-57-05.202526.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-02T08-57-05.202526.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_02T08_57_05.202526", "path": ["**/details_harness|hellaswag|10_2024-02-02T08-57-05.202526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-02T08-57-05.202526.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_02T08_57_05.202526", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T08-57-05.202526.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T08-57-05.202526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T08-57-05.202526.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T08-57-05.202526.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T08-57-05.202526.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T08-57-05.202526.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_02T08_57_05.202526", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T08-57-05.202526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T08-57-05.202526.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_02T08_57_05.202526", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T08-57-05.202526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T08-57-05.202526.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_02T08_57_05.202526", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T08-57-05.202526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T08-57-05.202526.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_02T08_57_05.202526", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T08-57-05.202526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T08-57-05.202526.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_02T08_57_05.202526", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T08-57-05.202526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T08-57-05.202526.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_02T08_57_05.202526", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T08-57-05.202526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T08-57-05.202526.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_02T08_57_05.202526", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T08-57-05.202526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T08-57-05.202526.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_02_02T08_57_05.202526", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T08-57-05.202526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T08-57-05.202526.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_02T08_57_05.202526", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T08-57-05.202526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T08-57-05.202526.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_02T08_57_05.202526", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T08-57-05.202526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T08-57-05.202526.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_02T08_57_05.202526", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T08-57-05.202526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T08-57-05.202526.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_02T08_57_05.202526", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T08-57-05.202526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T08-57-05.202526.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_02T08_57_05.202526", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T08-57-05.202526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T08-57-05.202526.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_02T08_57_05.202526", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T08-57-05.202526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T08-57-05.202526.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_02T08_57_05.202526", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T08-57-05.202526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T08-57-05.202526.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_02T08_57_05.202526", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T08-57-05.202526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T08-57-05.202526.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_02T08_57_05.202526", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T08-57-05.202526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T08-57-05.202526.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_02T08_57_05.202526", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T08-57-05.202526.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T08-57-05.202526.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_02T08_57_05.202526", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T08-57-05.202526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T08-57-05.202526.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_02T08_57_05.202526", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T08-57-05.202526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T08-57-05.202526.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_02T08_57_05.202526", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T08-57-05.202526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T08-57-05.202526.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_02T08_57_05.202526", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T08-57-05.202526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T08-57-05.202526.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_02T08_57_05.202526", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T08-57-05.202526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T08-57-05.202526.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_02T08_57_05.202526", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T08-57-05.202526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T08-57-05.202526.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_02T08_57_05.202526", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T08-57-05.202526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T08-57-05.202526.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_02T08_57_05.202526", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T08-57-05.202526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T08-57-05.202526.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_02T08_57_05.202526", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T08-57-05.202526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T08-57-05.202526.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_02T08_57_05.202526", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T08-57-05.202526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T08-57-05.202526.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_02T08_57_05.202526", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T08-57-05.202526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T08-57-05.202526.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_02T08_57_05.202526", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T08-57-05.202526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T08-57-05.202526.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_02T08_57_05.202526", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T08-57-05.202526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T08-57-05.202526.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_02T08_57_05.202526", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T08-57-05.202526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T08-57-05.202526.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_02T08_57_05.202526", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T08-57-05.202526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T08-57-05.202526.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_02T08_57_05.202526", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T08-57-05.202526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T08-57-05.202526.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_02T08_57_05.202526", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T08-57-05.202526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T08-57-05.202526.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_02T08_57_05.202526", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T08-57-05.202526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T08-57-05.202526.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_02T08_57_05.202526", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T08-57-05.202526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T08-57-05.202526.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_02_02T08_57_05.202526", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T08-57-05.202526.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T08-57-05.202526.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_02T08_57_05.202526", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T08-57-05.202526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T08-57-05.202526.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_02T08_57_05.202526", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T08-57-05.202526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T08-57-05.202526.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_02T08_57_05.202526", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T08-57-05.202526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T08-57-05.202526.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_02T08_57_05.202526", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T08-57-05.202526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T08-57-05.202526.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_02T08_57_05.202526", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T08-57-05.202526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T08-57-05.202526.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_02T08_57_05.202526", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T08-57-05.202526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T08-57-05.202526.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_02T08_57_05.202526", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T08-57-05.202526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T08-57-05.202526.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_02T08_57_05.202526", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T08-57-05.202526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T08-57-05.202526.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_02T08_57_05.202526", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T08-57-05.202526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T08-57-05.202526.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_02T08_57_05.202526", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T08-57-05.202526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T08-57-05.202526.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_02T08_57_05.202526", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T08-57-05.202526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T08-57-05.202526.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_02T08_57_05.202526", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T08-57-05.202526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T08-57-05.202526.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_02T08_57_05.202526", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T08-57-05.202526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T08-57-05.202526.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_02T08_57_05.202526", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T08-57-05.202526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T08-57-05.202526.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_02T08_57_05.202526", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T08-57-05.202526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T08-57-05.202526.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_02T08_57_05.202526", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T08-57-05.202526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T08-57-05.202526.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_02T08_57_05.202526", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T08-57-05.202526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T08-57-05.202526.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_02T08_57_05.202526", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T08-57-05.202526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T08-57-05.202526.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_02T08_57_05.202526", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T08-57-05.202526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T08-57-05.202526.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_02T08_57_05.202526", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T08-57-05.202526.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T08-57-05.202526.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_02T08_57_05.202526", "path": ["**/details_harness|winogrande|5_2024-02-02T08-57-05.202526.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-02-02T08-57-05.202526.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_02_02T08_57_05.202526", "path": ["results_2024-02-02T08-57-05.202526.parquet"]}, {"split": "latest", "path": ["results_2024-02-02T08-57-05.202526.parquet"]}]}]}
2024-02-02T08:59:48+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of Technoculture/Medmerge-tulu-70b Dataset automatically created during the evaluation run of model Technoculture/Medmerge-tulu-70b on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-02-02T08:57:05.202526 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
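The load snippet referenced just above ("do the following:") was dropped when this card's text was flattened; a minimal reconstruction follows, assuming the dataset repo id for this card follows the leaderboard's usual `details_<org>__<model>` naming pattern:

```python
from datasets import load_dataset

# Assumed repo id, following the details_<org>__<model> pattern used by the leaderboard.
data = load_dataset("open-llm-leaderboard/details_Technoculture__Medmerge-tulu-70b",
	"harness_winogrande_5",
	split="train")
```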
[ "# Dataset Card for Evaluation run of Technoculture/Medmerge-tulu-70b\n\n\n\nDataset automatically created during the evaluation run of model Technoculture/Medmerge-tulu-70b on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T08:57:05.202526 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of Technoculture/Medmerge-tulu-70b\n\n\n\nDataset automatically created during the evaluation run of model Technoculture/Medmerge-tulu-70b on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T08:57:05.202526 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
741cdd46623af7f9375f7b96d144f49f0991bd61
# Dataset Card for Evaluation run of rizla/rizla55b <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [rizla/rizla55b](https://huggingface.co/rizla/rizla55b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_rizla__rizla55b", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-02-02T09:00:01.266295](https://huggingface.co/datasets/open-llm-leaderboard/details_rizla__rizla55b/blob/main/results_2024-02-02T09-00-01.266295.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.629966070509496, "acc_stderr": 0.0328646103693209, "acc_norm": 0.6377234122481039, "acc_norm_stderr": 0.03355760561589414, "mc1": 0.38555691554467564, "mc1_stderr": 0.01703883901059167, "mc2": 0.5559179467355304, "mc2_stderr": 0.015414641498233956 }, "harness|arc:challenge|25": { "acc": 0.5554607508532423, "acc_stderr": 0.01452122640562707, "acc_norm": 0.6032423208191127, "acc_norm_stderr": 0.014296513020180628 }, "harness|hellaswag|10": { "acc": 0.5973909579764987, "acc_stderr": 0.004894210011303203, "acc_norm": 0.8042222664807808, "acc_norm_stderr": 0.003959872578165267 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6074074074074074, "acc_stderr": 0.0421850621536888, "acc_norm": 0.6074074074074074, "acc_norm_stderr": 0.0421850621536888 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.7368421052631579, "acc_stderr": 0.03583496176361074, "acc_norm": 0.7368421052631579, "acc_norm_stderr": 0.03583496176361074 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6339622641509434, "acc_stderr": 0.029647813539365245, "acc_norm": 0.6339622641509434, "acc_norm_stderr": 0.029647813539365245 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7152777777777778, "acc_stderr": 0.037738099906869334, "acc_norm": 0.7152777777777778, "acc_norm_stderr": 0.037738099906869334 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.44, "acc_stderr": 0.04988876515698589, "acc_norm": 0.44, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.49, "acc_stderr": 0.05024183937956912, "acc_norm": 0.49, "acc_norm_stderr": 0.05024183937956912 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.36,
"acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.5838150289017341, "acc_stderr": 0.03758517775404948, "acc_norm": 0.5838150289017341, "acc_norm_stderr": 0.03758517775404948 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4117647058823529, "acc_stderr": 0.048971049527263666, "acc_norm": 0.4117647058823529, "acc_norm_stderr": 0.048971049527263666 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.72, "acc_stderr": 0.04512608598542128, "acc_norm": 0.72, "acc_norm_stderr": 0.04512608598542128 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5659574468085107, "acc_stderr": 0.03240038086792747, "acc_norm": 0.5659574468085107, "acc_norm_stderr": 0.03240038086792747 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.49122807017543857, "acc_stderr": 0.047028804320496165, "acc_norm": 0.49122807017543857, "acc_norm_stderr": 0.047028804320496165 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5655172413793104, "acc_stderr": 0.04130740879555498, "acc_norm": 0.5655172413793104, "acc_norm_stderr": 0.04130740879555498 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.4656084656084656, "acc_stderr": 0.025690321762493838, "acc_norm": 0.4656084656084656, "acc_norm_stderr": 0.025690321762493838 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.5238095238095238, "acc_stderr": 0.04467062628403273, "acc_norm": 0.5238095238095238, "acc_norm_stderr": 0.04467062628403273 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.45, "acc_stderr": 0.049999999999999996, "acc_norm": 0.45, "acc_norm_stderr": 0.049999999999999996 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7387096774193549, "acc_stderr": 0.024993053397764826, "acc_norm": 0.7387096774193549, "acc_norm_stderr": 0.024993053397764826 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.4482758620689655, "acc_stderr": 0.03499113137676744, "acc_norm": 0.4482758620689655, "acc_norm_stderr": 0.03499113137676744 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.71, "acc_stderr": 0.045604802157206845, "acc_norm": 0.71, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7515151515151515, "acc_stderr": 0.033744026441394036, "acc_norm": 0.7515151515151515, "acc_norm_stderr": 0.033744026441394036 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7727272727272727, "acc_stderr": 0.029857515673386417, "acc_norm": 0.7727272727272727, "acc_norm_stderr": 0.029857515673386417 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8549222797927462, "acc_stderr": 0.025416343096306426, "acc_norm": 0.8549222797927462, "acc_norm_stderr": 0.025416343096306426 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6461538461538462, "acc_stderr": 0.02424378399406217, "acc_norm": 0.6461538461538462, "acc_norm_stderr": 0.02424378399406217 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.34444444444444444, "acc_stderr": 0.028972648884844267, "acc_norm": 0.34444444444444444, "acc_norm_stderr": 0.028972648884844267 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.7226890756302521, "acc_stderr": 0.029079374539480007, "acc_norm": 0.7226890756302521, "acc_norm_stderr": 0.029079374539480007 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.4105960264900662, "acc_stderr": 0.04016689594849928, "acc_norm": 0.4105960264900662, 
"acc_norm_stderr": 0.04016689594849928 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.818348623853211, "acc_stderr": 0.01653061740926688, "acc_norm": 0.818348623853211, "acc_norm_stderr": 0.01653061740926688 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5601851851851852, "acc_stderr": 0.0338517797604481, "acc_norm": 0.5601851851851852, "acc_norm_stderr": 0.0338517797604481 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7941176470588235, "acc_stderr": 0.028379449451588663, "acc_norm": 0.7941176470588235, "acc_norm_stderr": 0.028379449451588663 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8227848101265823, "acc_stderr": 0.024856364184503217, "acc_norm": 0.8227848101265823, "acc_norm_stderr": 0.024856364184503217 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6591928251121076, "acc_stderr": 0.0318114974705536, "acc_norm": 0.6591928251121076, "acc_norm_stderr": 0.0318114974705536 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7251908396946565, "acc_stderr": 0.039153454088478354, "acc_norm": 0.7251908396946565, "acc_norm_stderr": 0.039153454088478354 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7851239669421488, "acc_stderr": 0.037494924487096966, "acc_norm": 0.7851239669421488, "acc_norm_stderr": 0.037494924487096966 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7314814814814815, "acc_stderr": 0.042844679680521934, "acc_norm": 0.7314814814814815, "acc_norm_stderr": 0.042844679680521934 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7055214723926381, "acc_stderr": 0.03581165790474082, "acc_norm": 0.7055214723926381, "acc_norm_stderr": 0.03581165790474082 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.48214285714285715, "acc_stderr": 0.047427623612430116, "acc_norm": 0.48214285714285715, "acc_norm_stderr": 0.047427623612430116 }, "harness|hendrycksTest-management|5": { "acc": 0.7961165048543689, "acc_stderr": 0.039891398595317706, "acc_norm": 0.7961165048543689, "acc_norm_stderr": 0.039891398595317706 }, "harness|hendrycksTest-marketing|5": { "acc": 0.7905982905982906, "acc_stderr": 0.026655699653922744, "acc_norm": 0.7905982905982906, "acc_norm_stderr": 0.026655699653922744 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.66, "acc_stderr": 0.04760952285695237, "acc_norm": 0.66, "acc_norm_stderr": 0.04760952285695237 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.789272030651341, "acc_stderr": 0.014583812465862545, "acc_norm": 0.789272030651341, "acc_norm_stderr": 0.014583812465862545 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.684971098265896, "acc_stderr": 0.02500931379006971, "acc_norm": 0.684971098265896, "acc_norm_stderr": 0.02500931379006971 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4692737430167598, "acc_stderr": 0.016690896161944385, "acc_norm": 0.4692737430167598, "acc_norm_stderr": 0.016690896161944385 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7058823529411765, "acc_stderr": 0.026090162504279056, "acc_norm": 0.7058823529411765, "acc_norm_stderr": 0.026090162504279056 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.707395498392283, "acc_stderr": 0.02583989833487798, "acc_norm": 0.707395498392283, "acc_norm_stderr": 0.02583989833487798 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7314814814814815, "acc_stderr": 0.024659685185967308, "acc_norm": 0.7314814814814815, "acc_norm_stderr": 0.024659685185967308 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.48936170212765956, 
"acc_stderr": 0.029820747191422473, "acc_norm": 0.48936170212765956, "acc_norm_stderr": 0.029820747191422473 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.5123859191655802, "acc_stderr": 0.012766317315473551, "acc_norm": 0.5123859191655802, "acc_norm_stderr": 0.012766317315473551 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6580882352941176, "acc_stderr": 0.028814722422254177, "acc_norm": 0.6580882352941176, "acc_norm_stderr": 0.028814722422254177 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.7009803921568627, "acc_stderr": 0.018521756215423024, "acc_norm": 0.7009803921568627, "acc_norm_stderr": 0.018521756215423024 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6545454545454545, "acc_stderr": 0.04554619617541054, "acc_norm": 0.6545454545454545, "acc_norm_stderr": 0.04554619617541054 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.726530612244898, "acc_stderr": 0.028535560337128445, "acc_norm": 0.726530612244898, "acc_norm_stderr": 0.028535560337128445 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8009950248756219, "acc_stderr": 0.028231365092758406, "acc_norm": 0.8009950248756219, "acc_norm_stderr": 0.028231365092758406 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.82, "acc_stderr": 0.038612291966536934, "acc_norm": 0.82, "acc_norm_stderr": 0.038612291966536934 }, "harness|hendrycksTest-virology|5": { "acc": 0.463855421686747, "acc_stderr": 0.03882310850890594, "acc_norm": 0.463855421686747, "acc_norm_stderr": 0.03882310850890594 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8011695906432749, "acc_stderr": 0.030611116557432528, "acc_norm": 0.8011695906432749, "acc_norm_stderr": 0.030611116557432528 }, "harness|truthfulqa:mc|0": { "mc1": 0.38555691554467564, "mc1_stderr": 0.01703883901059167, "mc2": 0.5559179467355304, "mc2_stderr": 0.015414641498233956 }, "harness|winogrande|5": { "acc": 0.7884767166535123, "acc_stderr": 0.01147774768422318 }, "harness|gsm8k|5": { "acc": 0.26838514025777105, "acc_stderr": 0.01220570268801367 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
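As a follow-up to the load snippet in the card above, the aggregated numbers quoted under "Latest results" can also be pulled programmatically from the "results" configuration that the card declares; a minimal sketch, with the caveat that the exact column layout of the results parquet is an assumption to check before indexing:

```python
from datasets import load_dataset

# "results" is the aggregated-results configuration declared by this card;
# its "latest" split always points at the most recent evaluation run.
results = load_dataset("open-llm-leaderboard/details_rizla__rizla55b",
	"results",
	split="latest")

# Column layout is an assumption -- inspect it before indexing into rows.
print(results.column_names)
print(results[0])
```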
open-llm-leaderboard/details_rizla__rizla55b
[ "region:us" ]
2024-02-02T09:02:23+00:00
{"pretty_name": "Evaluation run of rizla/rizla55b", "dataset_summary": "Dataset automatically created during the evaluation run of model [rizla/rizla55b](https://huggingface.co/rizla/rizla55b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_rizla__rizla55b\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-02T09:00:01.266295](https://huggingface.co/datasets/open-llm-leaderboard/details_rizla__rizla55b/blob/main/results_2024-02-02T09-00-01.266295.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n    \"all\": {\n        \"acc\": 0.629966070509496,\n        \"acc_stderr\": 0.0328646103693209,\n        \"acc_norm\": 0.6377234122481039,\n        \"acc_norm_stderr\": 0.03355760561589414,\n        \"mc1\": 0.38555691554467564,\n        \"mc1_stderr\": 0.01703883901059167,\n        \"mc2\": 0.5559179467355304,\n        \"mc2_stderr\": 0.015414641498233956\n    },\n    \"harness|arc:challenge|25\": {\n        \"acc\": 0.5554607508532423,\n        \"acc_stderr\": 0.01452122640562707,\n        \"acc_norm\": 0.6032423208191127,\n        \"acc_norm_stderr\": 0.014296513020180628\n    },\n    \"harness|hellaswag|10\": {\n        \"acc\": 0.5973909579764987,\n        \"acc_stderr\": 0.004894210011303203,\n        \"acc_norm\": 0.8042222664807808,\n        \"acc_norm_stderr\": 0.003959872578165267\n    },\n    \"harness|hendrycksTest-abstract_algebra|5\": {\n        \"acc\": 0.31,\n        \"acc_stderr\": 0.04648231987117316,\n        \"acc_norm\": 0.31,\n        \"acc_norm_stderr\": 0.04648231987117316\n    },\n    \"harness|hendrycksTest-anatomy|5\": {\n        \"acc\": 0.6074074074074074,\n        \"acc_stderr\": 0.0421850621536888,\n        \"acc_norm\": 0.6074074074074074,\n        \"acc_norm_stderr\": 0.0421850621536888\n    },\n    \"harness|hendrycksTest-astronomy|5\": {\n        \"acc\": 0.7368421052631579,\n        \"acc_stderr\": 0.03583496176361074,\n        \"acc_norm\": 0.7368421052631579,\n        \"acc_norm_stderr\": 0.03583496176361074\n    },\n    \"harness|hendrycksTest-business_ethics|5\": {\n        \"acc\": 0.7,\n        \"acc_stderr\": 0.046056618647183814,\n        \"acc_norm\": 0.7,\n        \"acc_norm_stderr\": 0.046056618647183814\n    },\n    \"harness|hendrycksTest-clinical_knowledge|5\": {\n        \"acc\": 0.6339622641509434,\n        \"acc_stderr\": 0.029647813539365245,\n        \"acc_norm\": 0.6339622641509434,\n        \"acc_norm_stderr\": 0.029647813539365245\n    },\n    \"harness|hendrycksTest-college_biology|5\": {\n        \"acc\": 0.7152777777777778,\n        \"acc_stderr\": 0.037738099906869334,\n        \"acc_norm\": 0.7152777777777778,\n        \"acc_norm_stderr\": 0.037738099906869334\n    },\n    \"harness|hendrycksTest-college_chemistry|5\": {\n        \"acc\": 0.44,\n        \"acc_stderr\": 0.04988876515698589,\n        \"acc_norm\": 0.44,\n        \"acc_norm_stderr\": 
0.04988876515698589\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.49,\n \"acc_stderr\": 0.05024183937956912,\n \"acc_norm\": 0.49,\n \"acc_norm_stderr\": 0.05024183937956912\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.36,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5838150289017341,\n \"acc_stderr\": 0.03758517775404948,\n \"acc_norm\": 0.5838150289017341,\n \"acc_norm_stderr\": 0.03758517775404948\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4117647058823529,\n \"acc_stderr\": 0.048971049527263666,\n \"acc_norm\": 0.4117647058823529,\n \"acc_norm_stderr\": 0.048971049527263666\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.04512608598542128,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.04512608598542128\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5659574468085107,\n \"acc_stderr\": 0.03240038086792747,\n \"acc_norm\": 0.5659574468085107,\n \"acc_norm_stderr\": 0.03240038086792747\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.49122807017543857,\n \"acc_stderr\": 0.047028804320496165,\n \"acc_norm\": 0.49122807017543857,\n \"acc_norm_stderr\": 0.047028804320496165\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5655172413793104,\n \"acc_stderr\": 0.04130740879555498,\n \"acc_norm\": 0.5655172413793104,\n \"acc_norm_stderr\": 0.04130740879555498\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.4656084656084656,\n \"acc_stderr\": 0.025690321762493838,\n \"acc_norm\": 0.4656084656084656,\n \"acc_norm_stderr\": 0.025690321762493838\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.5238095238095238,\n \"acc_stderr\": 0.04467062628403273,\n \"acc_norm\": 0.5238095238095238,\n \"acc_norm_stderr\": 0.04467062628403273\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.45,\n \"acc_stderr\": 0.049999999999999996,\n \"acc_norm\": 0.45,\n \"acc_norm_stderr\": 0.049999999999999996\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7387096774193549,\n \"acc_stderr\": 0.024993053397764826,\n \"acc_norm\": 0.7387096774193549,\n \"acc_norm_stderr\": 0.024993053397764826\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.4482758620689655,\n \"acc_stderr\": 0.03499113137676744,\n \"acc_norm\": 0.4482758620689655,\n \"acc_norm_stderr\": 0.03499113137676744\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7515151515151515,\n \"acc_stderr\": 0.033744026441394036,\n \"acc_norm\": 0.7515151515151515,\n \"acc_norm_stderr\": 0.033744026441394036\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7727272727272727,\n \"acc_stderr\": 0.029857515673386417,\n \"acc_norm\": 0.7727272727272727,\n \"acc_norm_stderr\": 0.029857515673386417\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8549222797927462,\n \"acc_stderr\": 0.025416343096306426,\n \"acc_norm\": 0.8549222797927462,\n \"acc_norm_stderr\": 0.025416343096306426\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6461538461538462,\n \"acc_stderr\": 
0.02424378399406217,\n \"acc_norm\": 0.6461538461538462,\n \"acc_norm_stderr\": 0.02424378399406217\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.34444444444444444,\n \"acc_stderr\": 0.028972648884844267,\n \"acc_norm\": 0.34444444444444444,\n \"acc_norm_stderr\": 0.028972648884844267\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.7226890756302521,\n \"acc_stderr\": 0.029079374539480007,\n \"acc_norm\": 0.7226890756302521,\n \"acc_norm_stderr\": 0.029079374539480007\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.4105960264900662,\n \"acc_stderr\": 0.04016689594849928,\n \"acc_norm\": 0.4105960264900662,\n \"acc_norm_stderr\": 0.04016689594849928\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.818348623853211,\n \"acc_stderr\": 0.01653061740926688,\n \"acc_norm\": 0.818348623853211,\n \"acc_norm_stderr\": 0.01653061740926688\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5601851851851852,\n \"acc_stderr\": 0.0338517797604481,\n \"acc_norm\": 0.5601851851851852,\n \"acc_norm_stderr\": 0.0338517797604481\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7941176470588235,\n \"acc_stderr\": 0.028379449451588663,\n \"acc_norm\": 0.7941176470588235,\n \"acc_norm_stderr\": 0.028379449451588663\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8227848101265823,\n \"acc_stderr\": 0.024856364184503217,\n \"acc_norm\": 0.8227848101265823,\n \"acc_norm_stderr\": 0.024856364184503217\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6591928251121076,\n \"acc_stderr\": 0.0318114974705536,\n \"acc_norm\": 0.6591928251121076,\n \"acc_norm_stderr\": 0.0318114974705536\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7251908396946565,\n \"acc_stderr\": 0.039153454088478354,\n \"acc_norm\": 0.7251908396946565,\n \"acc_norm_stderr\": 0.039153454088478354\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7851239669421488,\n \"acc_stderr\": 0.037494924487096966,\n \"acc_norm\": 0.7851239669421488,\n \"acc_norm_stderr\": 0.037494924487096966\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7314814814814815,\n \"acc_stderr\": 0.042844679680521934,\n \"acc_norm\": 0.7314814814814815,\n \"acc_norm_stderr\": 0.042844679680521934\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7055214723926381,\n \"acc_stderr\": 0.03581165790474082,\n \"acc_norm\": 0.7055214723926381,\n \"acc_norm_stderr\": 0.03581165790474082\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.48214285714285715,\n \"acc_stderr\": 0.047427623612430116,\n \"acc_norm\": 0.48214285714285715,\n \"acc_norm_stderr\": 0.047427623612430116\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7961165048543689,\n \"acc_stderr\": 0.039891398595317706,\n \"acc_norm\": 0.7961165048543689,\n \"acc_norm_stderr\": 0.039891398595317706\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.7905982905982906,\n \"acc_stderr\": 0.026655699653922744,\n \"acc_norm\": 0.7905982905982906,\n \"acc_norm_stderr\": 0.026655699653922744\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.66,\n \"acc_stderr\": 0.04760952285695237,\n \"acc_norm\": 0.66,\n \"acc_norm_stderr\": 0.04760952285695237\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.789272030651341,\n \"acc_stderr\": 0.014583812465862545,\n \"acc_norm\": 0.789272030651341,\n 
\"acc_norm_stderr\": 0.014583812465862545\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.684971098265896,\n \"acc_stderr\": 0.02500931379006971,\n \"acc_norm\": 0.684971098265896,\n \"acc_norm_stderr\": 0.02500931379006971\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4692737430167598,\n \"acc_stderr\": 0.016690896161944385,\n \"acc_norm\": 0.4692737430167598,\n \"acc_norm_stderr\": 0.016690896161944385\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7058823529411765,\n \"acc_stderr\": 0.026090162504279056,\n \"acc_norm\": 0.7058823529411765,\n \"acc_norm_stderr\": 0.026090162504279056\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.707395498392283,\n \"acc_stderr\": 0.02583989833487798,\n \"acc_norm\": 0.707395498392283,\n \"acc_norm_stderr\": 0.02583989833487798\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7314814814814815,\n \"acc_stderr\": 0.024659685185967308,\n \"acc_norm\": 0.7314814814814815,\n \"acc_norm_stderr\": 0.024659685185967308\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.48936170212765956,\n \"acc_stderr\": 0.029820747191422473,\n \"acc_norm\": 0.48936170212765956,\n \"acc_norm_stderr\": 0.029820747191422473\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.5123859191655802,\n \"acc_stderr\": 0.012766317315473551,\n \"acc_norm\": 0.5123859191655802,\n \"acc_norm_stderr\": 0.012766317315473551\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6580882352941176,\n \"acc_stderr\": 0.028814722422254177,\n \"acc_norm\": 0.6580882352941176,\n \"acc_norm_stderr\": 0.028814722422254177\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.7009803921568627,\n \"acc_stderr\": 0.018521756215423024,\n \"acc_norm\": 0.7009803921568627,\n \"acc_norm_stderr\": 0.018521756215423024\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6545454545454545,\n \"acc_stderr\": 0.04554619617541054,\n \"acc_norm\": 0.6545454545454545,\n \"acc_norm_stderr\": 0.04554619617541054\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.726530612244898,\n \"acc_stderr\": 0.028535560337128445,\n \"acc_norm\": 0.726530612244898,\n \"acc_norm_stderr\": 0.028535560337128445\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8009950248756219,\n \"acc_stderr\": 0.028231365092758406,\n \"acc_norm\": 0.8009950248756219,\n \"acc_norm_stderr\": 0.028231365092758406\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.82,\n \"acc_stderr\": 0.038612291966536934,\n \"acc_norm\": 0.82,\n \"acc_norm_stderr\": 0.038612291966536934\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.463855421686747,\n \"acc_stderr\": 0.03882310850890594,\n \"acc_norm\": 0.463855421686747,\n \"acc_norm_stderr\": 0.03882310850890594\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8011695906432749,\n \"acc_stderr\": 0.030611116557432528,\n \"acc_norm\": 0.8011695906432749,\n \"acc_norm_stderr\": 0.030611116557432528\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.38555691554467564,\n \"mc1_stderr\": 0.01703883901059167,\n \"mc2\": 0.5559179467355304,\n \"mc2_stderr\": 0.015414641498233956\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7884767166535123,\n \"acc_stderr\": 0.01147774768422318\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.26838514025777105,\n \"acc_stderr\": 0.01220570268801367\n }\n}\n```", "repo_url": "https://huggingface.co/rizla/rizla55b", "leaderboard_url": 
"https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_02T09_00_01.266295", "path": ["**/details_harness|arc:challenge|25_2024-02-02T09-00-01.266295.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-02T09-00-01.266295.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_02T09_00_01.266295", "path": ["**/details_harness|gsm8k|5_2024-02-02T09-00-01.266295.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-02T09-00-01.266295.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_02T09_00_01.266295", "path": ["**/details_harness|hellaswag|10_2024-02-02T09-00-01.266295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-02T09-00-01.266295.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_02T09_00_01.266295", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T09-00-01.266295.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T09-00-01.266295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T09-00-01.266295.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-02T09-00-01.266295.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T09-00-01.266295.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T09-00-01.266295.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_02T09_00_01.266295", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T09-00-01.266295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T09-00-01.266295.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_02T09_00_01.266295", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T09-00-01.266295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T09-00-01.266295.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_02T09_00_01.266295", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T09-00-01.266295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T09-00-01.266295.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_02T09_00_01.266295", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T09-00-01.266295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T09-00-01.266295.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_02T09_00_01.266295", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T09-00-01.266295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T09-00-01.266295.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_02T09_00_01.266295", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T09-00-01.266295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T09-00-01.266295.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_02T09_00_01.266295", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T09-00-01.266295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T09-00-01.266295.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_02_02T09_00_01.266295", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T09-00-01.266295.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T09-00-01.266295.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_02T09_00_01.266295", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T09-00-01.266295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T09-00-01.266295.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_02T09_00_01.266295", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T09-00-01.266295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T09-00-01.266295.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_02T09_00_01.266295", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T09-00-01.266295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T09-00-01.266295.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_02T09_00_01.266295", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T09-00-01.266295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T09-00-01.266295.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_02T09_00_01.266295", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T09-00-01.266295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T09-00-01.266295.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_02T09_00_01.266295", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T09-00-01.266295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T09-00-01.266295.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_02T09_00_01.266295", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T09-00-01.266295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T09-00-01.266295.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_02T09_00_01.266295", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T09-00-01.266295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T09-00-01.266295.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_02T09_00_01.266295", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T09-00-01.266295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T09-00-01.266295.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_02T09_00_01.266295", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T09-00-01.266295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T09-00-01.266295.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_02T09_00_01.266295", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T09-00-01.266295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T09-00-01.266295.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_02T09_00_01.266295", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T09-00-01.266295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T09-00-01.266295.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_02T09_00_01.266295", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T09-00-01.266295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T09-00-01.266295.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_02T09_00_01.266295", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T09-00-01.266295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T09-00-01.266295.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_02T09_00_01.266295", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T09-00-01.266295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T09-00-01.266295.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_02T09_00_01.266295", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T09-00-01.266295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T09-00-01.266295.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_02T09_00_01.266295", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T09-00-01.266295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T09-00-01.266295.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_02T09_00_01.266295", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T09-00-01.266295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T09-00-01.266295.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_02T09_00_01.266295", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T09-00-01.266295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T09-00-01.266295.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_02T09_00_01.266295", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T09-00-01.266295.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T09-00-01.266295.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_02T09_00_01.266295", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T09-00-01.266295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T09-00-01.266295.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_02T09_00_01.266295", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T09-00-01.266295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T09-00-01.266295.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_02T09_00_01.266295", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T09-00-01.266295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T09-00-01.266295.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_02T09_00_01.266295", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T09-00-01.266295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T09-00-01.266295.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_02T09_00_01.266295", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T09-00-01.266295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T09-00-01.266295.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_02T09_00_01.266295", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T09-00-01.266295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T09-00-01.266295.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_02T09_00_01.266295", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T09-00-01.266295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T09-00-01.266295.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_02T09_00_01.266295", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T09-00-01.266295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T09-00-01.266295.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_02T09_00_01.266295", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T09-00-01.266295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T09-00-01.266295.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_02_02T09_00_01.266295", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T09-00-01.266295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T09-00-01.266295.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_02T09_00_01.266295", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T09-00-01.266295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T09-00-01.266295.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_02T09_00_01.266295", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T09-00-01.266295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T09-00-01.266295.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_02T09_00_01.266295", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T09-00-01.266295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T09-00-01.266295.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_02T09_00_01.266295", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T09-00-01.266295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T09-00-01.266295.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_02T09_00_01.266295", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T09-00-01.266295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T09-00-01.266295.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_02T09_00_01.266295", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T09-00-01.266295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T09-00-01.266295.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_02T09_00_01.266295", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T09-00-01.266295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T09-00-01.266295.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_02T09_00_01.266295", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T09-00-01.266295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T09-00-01.266295.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_02T09_00_01.266295", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T09-00-01.266295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T09-00-01.266295.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_02T09_00_01.266295", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T09-00-01.266295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T09-00-01.266295.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_02T09_00_01.266295", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T09-00-01.266295.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T09-00-01.266295.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_02T09_00_01.266295", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T09-00-01.266295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T09-00-01.266295.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_02T09_00_01.266295", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T09-00-01.266295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T09-00-01.266295.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_02T09_00_01.266295", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T09-00-01.266295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T09-00-01.266295.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_02T09_00_01.266295", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T09-00-01.266295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T09-00-01.266295.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_02T09_00_01.266295", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T09-00-01.266295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T09-00-01.266295.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_02T09_00_01.266295", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T09-00-01.266295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T09-00-01.266295.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_02T09_00_01.266295", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T09-00-01.266295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T09-00-01.266295.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_02T09_00_01.266295", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T09-00-01.266295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T09-00-01.266295.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_02T09_00_01.266295", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T09-00-01.266295.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T09-00-01.266295.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_02T09_00_01.266295", "path": ["**/details_harness|winogrande|5_2024-02-02T09-00-01.266295.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-02-02T09-00-01.266295.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_02_02T09_00_01.266295", "path": ["results_2024-02-02T09-00-01.266295.parquet"]}, {"split": "latest", "path": 
["results_2024-02-02T09-00-01.266295.parquet"]}]}]}
2024-02-02T09:02:51+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of rizla/rizla55b Dataset automatically created during the evaluation run of model rizla/rizla55b on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-02-02T09:00:01.266295 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
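The loading snippet that originally followed "do the following:" was stripped during extraction. A minimal sketch, assuming the leaderboard's usual `details_<org>__<model>` repository naming applies to rizla/rizla55b:

```python
from datasets import load_dataset

# Repo name assumed from the leaderboard's details_<org>__<model> convention.
data = load_dataset("open-llm-leaderboard/details_rizla__rizla55b",
	"harness_winogrande_5",
	split="train")
```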
[ "# Dataset Card for Evaluation run of rizla/rizla55b\n\n\n\nDataset automatically created during the evaluation run of model rizla/rizla55b on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T09:00:01.266295(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of rizla/rizla55b\n\n\n\nDataset automatically created during the evaluation run of model rizla/rizla55b on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T09:00:01.266295(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
c211ebf0ff8fefb844168fa6716780f5394a3eb6
# Dataset Card for "instruction-types" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
McSpicyWithMilo/instruction-types-0.2split
[ "region:us" ]
2024-02-02T09:04:02+00:00
{"dataset_info": {"features": [{"name": "instruction_type", "dtype": "string"}, {"name": "instruction", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 28238, "num_examples": 320}, {"name": "test", "num_bytes": 6791, "num_examples": 80}], "download_size": 18706, "dataset_size": 35029}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}]}
2024-02-02T09:04:13+00:00
[]
[]
TAGS #region-us
# Dataset Card for "instruction-types" More Information needed
[ "# Dataset Card for \"instruction-types\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"instruction-types\"\n\nMore Information needed" ]
217470295f597fd4a669f9aba43fde87ca6e187a
The CC BY-NC license allows you to use, modify, and distribute the dataset for non-commercial purposes only, as long as you give appropriate credit to the author.
satyoshi/gauzilla-data
[ "license:cc-by-nc-4.0", "region:us" ]
2024-02-02T09:06:34+00:00
{"license": "cc-by-nc-4.0"}
2024-02-14T18:20:39+00:00
[]
[]
TAGS #license-cc-by-nc-4.0 #region-us
The CC BY-NC license allows you to use, modify, and distribute the dataset for non-commercial purposes only, as long as you give appropriate credit to the author.
[]
[ "TAGS\n#license-cc-by-nc-4.0 #region-us \n" ]
e9c96b469322b25f89376f544d32d30bc123dbd4
**Dataset Card for "QuiltVQA_ALL"** <p align="center"> <img src="https://quilt-llava.github.io/static/images/quilt_vqa_samples.png" alt="fig2" width="90%"/> </p> **Human Generated VQA Dataset for Evaluation** [Quilt-VQA](https://quilt-llava.github.io) is generated by extracting a Q&A dataset from naturally occurring question/answer pairs given in educational histopathology videos. With the help of GPT-4 and some handcrafted algorithms, we collect a rich evaluation dataset of 1283 Q&A pairs. The top two rows show image-dependent Q&A pairs and the bottom two rows show general-knowledge Q&A pairs. The original question posed by the narrator of the video is highlighted in yellow. Furthermore, to generate [Quilt-VQA-RED](https://quilt-llava.github.io), we experimented with the visual prompting methodology outlined in Visual Prompting using Red Circle to evaluate models. This involves utilizing the subset of QUILT-VQA with bounding boxes to create ellipses that encapsulate the concepts highlighted by these boxes. <p align="center"> <img src="https://quilt-llava.github.io/static/images/visual_prompting.png" alt="fig2" width="70%"/> </p> **Citation** ```bibtex @article{seyfioglu2023quilt, title={Quilt-LLaVA: Visual Instruction Tuning by Extracting Localized Narratives from Open-Source Histopathology Videos}, author={Seyfioglu, Mehmet Saygin and Ikezogwo, Wisdom O and Ghezloo, Fatemeh and Krishna, Ranjay and Shapiro, Linda}, journal={arXiv preprint arXiv:2312.04746}, year={2023} } ```
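The red-ellipse visual prompting described above is straightforward to reproduce. A minimal sketch, not the authors' exact code: it assumes a bounding box in pixel coordinates `(x0, y0, x1, y1)` taken from the QUILT-VQA subset that ships boxes, and the padding and line-width values are illustrative choices, not the paper's:

```python
from PIL import Image, ImageDraw

def add_red_ellipse(image: Image.Image, bbox: tuple) -> Image.Image:
    """Draw a red ellipse enclosing an (x0, y0, x1, y1) pixel bounding box."""
    img = image.convert("RGB").copy()
    draw = ImageDraw.Draw(img)
    x0, y0, x1, y1 = bbox
    # Pad the box ~10% so the ellipse encapsulates the concept instead of clipping it
    # (exact padding and stroke width are assumptions).
    pad_x, pad_y = 0.1 * (x1 - x0), 0.1 * (y1 - y0)
    draw.ellipse(
        (x0 - pad_x, y0 - pad_y, x1 + pad_x, y1 + pad_y),
        outline=(255, 0, 0),
        width=max(2, img.width // 150),
    )
    return img
```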
wisdomik/QuiltVQA_All
[ "task_categories:visual-question-answering", "task_categories:question-answering", "size_categories:1K<n<10K", "language:en", "license:mit", "region:us" ]
2024-02-02T09:15:33+00:00
{"language": ["en"], "license": "mit", "size_categories": ["1K<n<10K"], "task_categories": ["visual-question-answering", "question-answering"], "pretty_name": "QUILT-VQA", "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}], "dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "question", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "answer_type", "dtype": "string"}, {"name": "context", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 301181306.28, "num_examples": 1320}], "download_size": 301211303, "dataset_size": 301181306.28}}
2024-02-08T01:48:40+00:00
[]
[ "en" ]
TAGS #task_categories-visual-question-answering #task_categories-question-answering #size_categories-1K<n<10K #language-English #license-mit #region-us
Dataset Card for "QuiltVQA_ALL" <p align="center"> <img src="URL alt="fig2" width="90%"/> </p> Human Generated VQA Dataset for Evaluation Quilt-VQA is generated by extracting Q&A dataset from naturally occurring questions/answers given in educational histopathology videos. With the help of GPT4 and some handcrafted algorithms, we collect a rich evaluation dataset of 1283 Q&A pairs. Top two rows show image-dependent Q&A pairs and bottom two rows show general-knowledge Q&A pairs. The original question posed by the narrator of the video is highlighted in yellow Furthermore, to generateQuilt-VQA-RED, we experimented with the visual prompting methodology outlined in Visual Prompting using Red Circle to evaluate models. This involves utilizing the subset of QUILT-VQA with bounding boxes to create ellipses that encapsulate the concepts highlighted by these boxes. <p align="center"> <img src="URL alt="fig2" width="70%"/> </p> Citation
[]
[ "TAGS\n#task_categories-visual-question-answering #task_categories-question-answering #size_categories-1K<n<10K #language-English #license-mit #region-us \n" ]
0b5df005958043c763b527c5c9e299233590192d
# Dataset Card for MMCQS Dataset This is the MMCQS Dataset that has been used in the paper "MedSumm: A Multimodal Approach to Summarizing Code-Mixed Hindi-English Clinical Queries", accepted at ECIR 2024. - **GitHub:** https://github.com/ArkadeepAcharya/MedSumm-ECIR2024 - **Paper:** https://arxiv.org/abs/2401.01596 ## Uses <!-- Address questions around how the dataset is intended to be used. --> Download and unzip the Multimodal_images.zip file to access the images that have been used in the dataset. The image path given in the dataset is relative to the Multimodal_images folder. ### Disclaimer <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> The dataset includes explicit medical imagery, necessary for an in-depth understanding of the subject matter. ## Citation <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> ```bibtex @misc{ghosh2024medsumm, title={MedSumm: A Multimodal Approach to Summarizing Code-Mixed Hindi-English Clinical Queries}, author={Akash Ghosh and Arkadeep Acharya and Prince Jha and Aniket Gaudgaul and Rajdeep Majumdar and Sriparna Saha and Aman Chadha and Raghav Jain and Setu Sinha and Shivani Agarwal}, year={2024}, eprint={2401.01596}, archivePrefix={arXiv}, primaryClass={cs.AI} } ``` For questions or inquiries, please write to [email protected] or create an issue on https://github.com/ArkadeepAcharya/MedSumm-ECIR2024
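Since the `Image_path` column is relative to the unzipped `Multimodal_images` folder, a small helper can join the two. This is a sketch: the column names (`Codemixed_Question`, `Image_path`) come from the dataset metadata below, while the local folder location is an assumption about where the archive was unzipped:

```python
import os
from datasets import load_dataset

IMAGES_ROOT = "Multimodal_images"  # assumed location of the unzipped Multimodal_images.zip

ds = load_dataset("ArkaAcharya/MMCQSD", split="train")

def resolve_image_path(example):
    # Image_path is stored relative to the unzipped images folder.
    example["abs_image_path"] = os.path.join(IMAGES_ROOT, example["Image_path"])
    return example

ds = ds.map(resolve_image_path)
print(ds[0]["Codemixed_Question"][:80], "->", ds[0]["abs_image_path"])
```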
ArkaAcharya/MMCQSD
[ "task_categories:summarization", "size_categories:1K<n<10K", "language:en", "language:hi", "license:mit", "medical", "arxiv:2401.01596", "region:us" ]
2024-02-02T09:40:57+00:00
{"language": ["en", "hi"], "license": "mit", "size_categories": ["1K<n<10K"], "task_categories": ["summarization"], "dataset_info": {"features": [{"name": "Codemixed_Question", "dtype": "string"}, {"name": "summary", "dtype": "string"}, {"name": "Image_path", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 2706069, "num_examples": 3015}], "download_size": 1342605, "dataset_size": 2706069}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}], "tags": ["medical"]}
2024-02-06T04:39:51+00:00
[ "2401.01596" ]
[ "en", "hi" ]
TAGS #task_categories-summarization #size_categories-1K<n<10K #language-English #language-Hindi #license-mit #medical #arxiv-2401.01596 #region-us
# Dataset Card for MMCQS Dataset This is the MMCQS Dataset that has been used in the paper "MedSumm: A Multimodal Approach to Summarizing Code-Mixed Hindi-English Clinical Queries", accepted at ECIR 2024. - GitHub: URL - Paper: URL ## Uses Download and unzip the Multimodal_images.zip file to access the images that have been used in the dataset. The image path given in the dataset is relative to the Multimodal_images folder. ### Disclaimer The dataset includes explicit medical imagery, necessary for an in-depth understanding of the subject matter. For questions or inquiries, please write to akashghosh.ag90@URL or create an issue on URL
[ "# Dataset Card for MMCQS Dataset\nThis is the MMCQS Dataset that have been used in the paper \"MedSumm: A Multimodal Approach to Summarizing Code-Mixed Hindi-English Clinical Queries\" accepted in ECIR 2024.\n\n- Github: URL\n\n- Paper: URL", "## Uses\n\n\nDownload and unzip the Multimodal_images.zip file to access the images that have been used in the dataset. The image path given in the dataset is with respect to the Multimodal_images folder.", "### Disclaimer\n\n\n\nThe dataset includes explicit medical imagery, necessary for an in-depth understanding of the subject matter\n\nFor questions or inquiries, please write to akashghosh.ag90@URL or create an issue on URL" ]
[ "TAGS\n#task_categories-summarization #size_categories-1K<n<10K #language-English #language-Hindi #license-mit #medical #arxiv-2401.01596 #region-us \n", "# Dataset Card for MMCQS Dataset\nThis is the MMCQS Dataset that have been used in the paper \"MedSumm: A Multimodal Approach to Summarizing Code-Mixed Hindi-English Clinical Queries\" accepted in ECIR 2024.\n\n- Github: URL\n\n- Paper: URL", "## Uses\n\n\nDownload and unzip the Multimodal_images.zip file to access the images that have been used in the dataset. The image path given in the dataset is with respect to the Multimodal_images folder.", "### Disclaimer\n\n\n\nThe dataset includes explicit medical imagery, necessary for an in-depth understanding of the subject matter\n\nFor questions or inquiries, please write to akashghosh.ag90@URL or create an issue on URL" ]
8e43d40e1978fae791c2c6c44abddeb0541e8052
# Dataset Card for Evaluation run of rizla/raccoon-small <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [rizla/raccoon-small](https://huggingface.co/rizla/raccoon-small) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_rizla__raccoon-small", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-02-02T10:01:35.686366](https://huggingface.co/datasets/open-llm-leaderboard/details_rizla__raccoon-small/blob/main/results_2024-02-02T10-01-35.686366.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.650595848646737, "acc_stderr": 0.03218536411440889, "acc_norm": 0.651267003548253, "acc_norm_stderr": 0.032861158588995604, "mc1": 0.6193390452876377, "mc1_stderr": 0.016997627871907915, "mc2": 0.7673830386789108, "mc2_stderr": 0.013988013317866293 }, "harness|arc:challenge|25": { "acc": 0.7320819112627986, "acc_stderr": 0.01294203019513643, "acc_norm": 0.7440273037542662, "acc_norm_stderr": 0.01275301324124452 }, "harness|hellaswag|10": { "acc": 0.7184823740290779, "acc_stderr": 0.00448820175664258, "acc_norm": 0.8872734515036845, "acc_norm_stderr": 0.003156118964752944 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.35, "acc_stderr": 0.0479372485441102, "acc_norm": 0.35, "acc_norm_stderr": 0.0479372485441102 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6148148148148148, "acc_stderr": 0.04203921040156279, "acc_norm": 0.6148148148148148, "acc_norm_stderr": 0.04203921040156279 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6907894736842105, "acc_stderr": 0.037610708698674805, "acc_norm": 0.6907894736842105, "acc_norm_stderr": 0.037610708698674805 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.63, "acc_stderr": 0.04852365870939099, "acc_norm": 0.63, "acc_norm_stderr": 0.04852365870939099 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6981132075471698, "acc_stderr": 0.02825420034443866, "acc_norm": 0.6981132075471698, "acc_norm_stderr": 0.02825420034443866 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7569444444444444, "acc_stderr": 0.0358687928008034, "acc_norm": 0.7569444444444444, "acc_norm_stderr": 0.0358687928008034 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.54, "acc_stderr": 0.05009082659620332, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620332 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.54, "acc_stderr": 0.05009082659620333, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620333 }, "harness|hendrycksTest-college_mathematics|5": {
"acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6358381502890174, "acc_stderr": 0.03669072477416907, "acc_norm": 0.6358381502890174, "acc_norm_stderr": 0.03669072477416907 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.38235294117647056, "acc_stderr": 0.04835503696107223, "acc_norm": 0.38235294117647056, "acc_norm_stderr": 0.04835503696107223 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.72, "acc_stderr": 0.04512608598542127, "acc_norm": 0.72, "acc_norm_stderr": 0.04512608598542127 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5702127659574469, "acc_stderr": 0.03236214467715564, "acc_norm": 0.5702127659574469, "acc_norm_stderr": 0.03236214467715564 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.49122807017543857, "acc_stderr": 0.04702880432049615, "acc_norm": 0.49122807017543857, "acc_norm_stderr": 0.04702880432049615 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5586206896551724, "acc_stderr": 0.04137931034482758, "acc_norm": 0.5586206896551724, "acc_norm_stderr": 0.04137931034482758 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.42857142857142855, "acc_stderr": 0.025487187147859375, "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.025487187147859375 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.4523809523809524, "acc_stderr": 0.044518079590553275, "acc_norm": 0.4523809523809524, "acc_norm_stderr": 0.044518079590553275 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.33, "acc_stderr": 0.04725815626252604, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252604 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7935483870967742, "acc_stderr": 0.023025899617188723, "acc_norm": 0.7935483870967742, "acc_norm_stderr": 0.023025899617188723 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.4876847290640394, "acc_stderr": 0.035169204442208966, "acc_norm": 0.4876847290640394, "acc_norm_stderr": 0.035169204442208966 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.72, "acc_stderr": 0.04512608598542127, "acc_norm": 0.72, "acc_norm_stderr": 0.04512608598542127 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.806060606060606, "acc_stderr": 0.03087414513656209, "acc_norm": 0.806060606060606, "acc_norm_stderr": 0.03087414513656209 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7828282828282829, "acc_stderr": 0.029376616484945633, "acc_norm": 0.7828282828282829, "acc_norm_stderr": 0.029376616484945633 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8808290155440415, "acc_stderr": 0.023381935348121427, "acc_norm": 0.8808290155440415, "acc_norm_stderr": 0.023381935348121427 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6717948717948717, "acc_stderr": 0.023807633198657266, "acc_norm": 0.6717948717948717, "acc_norm_stderr": 0.023807633198657266 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.34444444444444444, "acc_stderr": 0.028972648884844267, "acc_norm": 0.34444444444444444, "acc_norm_stderr": 0.028972648884844267 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.7058823529411765, "acc_stderr": 0.029597329730978093, "acc_norm": 0.7058823529411765, "acc_norm_stderr": 0.029597329730978093 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.33774834437086093, "acc_stderr": 0.03861557546255169, "acc_norm": 
0.33774834437086093, "acc_norm_stderr": 0.03861557546255169 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8513761467889909, "acc_stderr": 0.015251253773660836, "acc_norm": 0.8513761467889909, "acc_norm_stderr": 0.015251253773660836 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5231481481481481, "acc_stderr": 0.03406315360711507, "acc_norm": 0.5231481481481481, "acc_norm_stderr": 0.03406315360711507 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8382352941176471, "acc_stderr": 0.025845017986926917, "acc_norm": 0.8382352941176471, "acc_norm_stderr": 0.025845017986926917 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8016877637130801, "acc_stderr": 0.02595502084162112, "acc_norm": 0.8016877637130801, "acc_norm_stderr": 0.02595502084162112 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6636771300448431, "acc_stderr": 0.031708824268455, "acc_norm": 0.6636771300448431, "acc_norm_stderr": 0.031708824268455 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7938931297709924, "acc_stderr": 0.03547771004159465, "acc_norm": 0.7938931297709924, "acc_norm_stderr": 0.03547771004159465 }, "harness|hendrycksTest-international_law|5": { "acc": 0.768595041322314, "acc_stderr": 0.03849856098794088, "acc_norm": 0.768595041322314, "acc_norm_stderr": 0.03849856098794088 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7777777777777778, "acc_stderr": 0.0401910747255735, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.0401910747255735 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7607361963190185, "acc_stderr": 0.0335195387952127, "acc_norm": 0.7607361963190185, "acc_norm_stderr": 0.0335195387952127 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.42857142857142855, "acc_stderr": 0.04697113923010212, "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.04697113923010212 }, "harness|hendrycksTest-management|5": { "acc": 0.7864077669902912, "acc_stderr": 0.040580420156460344, "acc_norm": 0.7864077669902912, "acc_norm_stderr": 0.040580420156460344 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8803418803418803, "acc_stderr": 0.021262719400406974, "acc_norm": 0.8803418803418803, "acc_norm_stderr": 0.021262719400406974 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.73, "acc_stderr": 0.044619604333847394, "acc_norm": 0.73, "acc_norm_stderr": 0.044619604333847394 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8212005108556832, "acc_stderr": 0.013702643715368983, "acc_norm": 0.8212005108556832, "acc_norm_stderr": 0.013702643715368983 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7225433526011561, "acc_stderr": 0.024105712607754307, "acc_norm": 0.7225433526011561, "acc_norm_stderr": 0.024105712607754307 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4301675977653631, "acc_stderr": 0.01655860163604104, "acc_norm": 0.4301675977653631, "acc_norm_stderr": 0.01655860163604104 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7320261437908496, "acc_stderr": 0.025360603796242557, "acc_norm": 0.7320261437908496, "acc_norm_stderr": 0.025360603796242557 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.707395498392283, "acc_stderr": 0.02583989833487798, "acc_norm": 0.707395498392283, "acc_norm_stderr": 0.02583989833487798 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7345679012345679, "acc_stderr": 0.024569223600460845, "acc_norm": 0.7345679012345679, "acc_norm_stderr": 0.024569223600460845 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 
0.5070921985815603, "acc_stderr": 0.02982449855912901, "acc_norm": 0.5070921985815603, "acc_norm_stderr": 0.02982449855912901 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.46153846153846156, "acc_stderr": 0.01273239828619044, "acc_norm": 0.46153846153846156, "acc_norm_stderr": 0.01273239828619044 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6838235294117647, "acc_stderr": 0.02824568739146292, "acc_norm": 0.6838235294117647, "acc_norm_stderr": 0.02824568739146292 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6683006535947712, "acc_stderr": 0.019047485239360378, "acc_norm": 0.6683006535947712, "acc_norm_stderr": 0.019047485239360378 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6909090909090909, "acc_stderr": 0.044262946482000985, "acc_norm": 0.6909090909090909, "acc_norm_stderr": 0.044262946482000985 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7387755102040816, "acc_stderr": 0.028123429335142783, "acc_norm": 0.7387755102040816, "acc_norm_stderr": 0.028123429335142783 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8308457711442786, "acc_stderr": 0.02650859065623327, "acc_norm": 0.8308457711442786, "acc_norm_stderr": 0.02650859065623327 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.83, "acc_stderr": 0.0377525168068637, "acc_norm": 0.83, "acc_norm_stderr": 0.0377525168068637 }, "harness|hendrycksTest-virology|5": { "acc": 0.5662650602409639, "acc_stderr": 0.03858158940685515, "acc_norm": 0.5662650602409639, "acc_norm_stderr": 0.03858158940685515 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8128654970760234, "acc_stderr": 0.029913127232368043, "acc_norm": 0.8128654970760234, "acc_norm_stderr": 0.029913127232368043 }, "harness|truthfulqa:mc|0": { "mc1": 0.6193390452876377, "mc1_stderr": 0.016997627871907915, "mc2": 0.7673830386789108, "mc2_stderr": 0.013988013317866293 }, "harness|winogrande|5": { "acc": 0.8737174427782163, "acc_stderr": 0.009335559129908475 }, "harness|gsm8k|5": { "acc": 0.5686125852918877, "acc_stderr": 0.013642195352511568 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_rizla__raccoon-small
[ "region:us" ]
2024-02-02T09:56:37+00:00
{"pretty_name": "Evaluation run of rizla/raccoon-small", "dataset_summary": "Dataset automatically created during the evaluation run of model [rizla/raccoon-small](https://huggingface.co/rizla/raccoon-small) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_rizla__raccoon-small\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-02T10:01:35.686366](https://huggingface.co/datasets/open-llm-leaderboard/details_rizla__raccoon-small/blob/main/results_2024-02-02T10-01-35.686366.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.650595848646737,\n \"acc_stderr\": 0.03218536411440889,\n \"acc_norm\": 0.651267003548253,\n \"acc_norm_stderr\": 0.032861158588995604,\n \"mc1\": 0.6193390452876377,\n \"mc1_stderr\": 0.016997627871907915,\n \"mc2\": 0.7673830386789108,\n \"mc2_stderr\": 0.013988013317866293\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.7320819112627986,\n \"acc_stderr\": 0.01294203019513643,\n \"acc_norm\": 0.7440273037542662,\n \"acc_norm_stderr\": 0.01275301324124452\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.7184823740290779,\n \"acc_stderr\": 0.00448820175664258,\n \"acc_norm\": 0.8872734515036845,\n \"acc_norm_stderr\": 0.003156118964752944\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.0479372485441102,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6148148148148148,\n \"acc_stderr\": 0.04203921040156279,\n \"acc_norm\": 0.6148148148148148,\n \"acc_norm_stderr\": 0.04203921040156279\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6907894736842105,\n \"acc_stderr\": 0.037610708698674805,\n \"acc_norm\": 0.6907894736842105,\n \"acc_norm_stderr\": 0.037610708698674805\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.63,\n \"acc_stderr\": 0.04852365870939099,\n \"acc_norm\": 0.63,\n \"acc_norm_stderr\": 0.04852365870939099\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6981132075471698,\n \"acc_stderr\": 0.02825420034443866,\n \"acc_norm\": 0.6981132075471698,\n \"acc_norm_stderr\": 0.02825420034443866\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7569444444444444,\n \"acc_stderr\": 0.0358687928008034,\n \"acc_norm\": 0.7569444444444444,\n \"acc_norm_stderr\": 0.0358687928008034\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.54,\n \"acc_stderr\": 0.05009082659620332,\n \"acc_norm\": 0.54,\n 
\"acc_norm_stderr\": 0.05009082659620332\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.54,\n \"acc_stderr\": 0.05009082659620333,\n \"acc_norm\": 0.54,\n \"acc_norm_stderr\": 0.05009082659620333\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.047258156262526045,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.047258156262526045\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6358381502890174,\n \"acc_stderr\": 0.03669072477416907,\n \"acc_norm\": 0.6358381502890174,\n \"acc_norm_stderr\": 0.03669072477416907\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.38235294117647056,\n \"acc_stderr\": 0.04835503696107223,\n \"acc_norm\": 0.38235294117647056,\n \"acc_norm_stderr\": 0.04835503696107223\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.04512608598542127,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.04512608598542127\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5702127659574469,\n \"acc_stderr\": 0.03236214467715564,\n \"acc_norm\": 0.5702127659574469,\n \"acc_norm_stderr\": 0.03236214467715564\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.49122807017543857,\n \"acc_stderr\": 0.04702880432049615,\n \"acc_norm\": 0.49122807017543857,\n \"acc_norm_stderr\": 0.04702880432049615\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5586206896551724,\n \"acc_stderr\": 0.04137931034482758,\n \"acc_norm\": 0.5586206896551724,\n \"acc_norm_stderr\": 0.04137931034482758\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.42857142857142855,\n \"acc_stderr\": 0.025487187147859375,\n \"acc_norm\": 0.42857142857142855,\n \"acc_norm_stderr\": 0.025487187147859375\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4523809523809524,\n \"acc_stderr\": 0.044518079590553275,\n \"acc_norm\": 0.4523809523809524,\n \"acc_norm_stderr\": 0.044518079590553275\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252604,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252604\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7935483870967742,\n \"acc_stderr\": 0.023025899617188723,\n \"acc_norm\": 0.7935483870967742,\n \"acc_norm_stderr\": 0.023025899617188723\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.4876847290640394,\n \"acc_stderr\": 0.035169204442208966,\n \"acc_norm\": 0.4876847290640394,\n \"acc_norm_stderr\": 0.035169204442208966\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.04512608598542127,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.04512608598542127\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.806060606060606,\n \"acc_stderr\": 0.03087414513656209,\n \"acc_norm\": 0.806060606060606,\n \"acc_norm_stderr\": 0.03087414513656209\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7828282828282829,\n \"acc_stderr\": 0.029376616484945633,\n \"acc_norm\": 0.7828282828282829,\n \"acc_norm_stderr\": 0.029376616484945633\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8808290155440415,\n \"acc_stderr\": 0.023381935348121427,\n \"acc_norm\": 0.8808290155440415,\n \"acc_norm_stderr\": 0.023381935348121427\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6717948717948717,\n 
\"acc_stderr\": 0.023807633198657266,\n \"acc_norm\": 0.6717948717948717,\n \"acc_norm_stderr\": 0.023807633198657266\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.34444444444444444,\n \"acc_stderr\": 0.028972648884844267,\n \"acc_norm\": 0.34444444444444444,\n \"acc_norm_stderr\": 0.028972648884844267\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.7058823529411765,\n \"acc_stderr\": 0.029597329730978093,\n \"acc_norm\": 0.7058823529411765,\n \"acc_norm_stderr\": 0.029597329730978093\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.33774834437086093,\n \"acc_stderr\": 0.03861557546255169,\n \"acc_norm\": 0.33774834437086093,\n \"acc_norm_stderr\": 0.03861557546255169\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8513761467889909,\n \"acc_stderr\": 0.015251253773660836,\n \"acc_norm\": 0.8513761467889909,\n \"acc_norm_stderr\": 0.015251253773660836\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5231481481481481,\n \"acc_stderr\": 0.03406315360711507,\n \"acc_norm\": 0.5231481481481481,\n \"acc_norm_stderr\": 0.03406315360711507\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8382352941176471,\n \"acc_stderr\": 0.025845017986926917,\n \"acc_norm\": 0.8382352941176471,\n \"acc_norm_stderr\": 0.025845017986926917\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8016877637130801,\n \"acc_stderr\": 0.02595502084162112,\n \"acc_norm\": 0.8016877637130801,\n \"acc_norm_stderr\": 0.02595502084162112\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6636771300448431,\n \"acc_stderr\": 0.031708824268455,\n \"acc_norm\": 0.6636771300448431,\n \"acc_norm_stderr\": 0.031708824268455\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7938931297709924,\n \"acc_stderr\": 0.03547771004159465,\n \"acc_norm\": 0.7938931297709924,\n \"acc_norm_stderr\": 0.03547771004159465\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.768595041322314,\n \"acc_stderr\": 0.03849856098794088,\n \"acc_norm\": 0.768595041322314,\n \"acc_norm_stderr\": 0.03849856098794088\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.0401910747255735,\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.0401910747255735\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7607361963190185,\n \"acc_stderr\": 0.0335195387952127,\n \"acc_norm\": 0.7607361963190185,\n \"acc_norm_stderr\": 0.0335195387952127\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.42857142857142855,\n \"acc_stderr\": 0.04697113923010212,\n \"acc_norm\": 0.42857142857142855,\n \"acc_norm_stderr\": 0.04697113923010212\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7864077669902912,\n \"acc_stderr\": 0.040580420156460344,\n \"acc_norm\": 0.7864077669902912,\n \"acc_norm_stderr\": 0.040580420156460344\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8803418803418803,\n \"acc_stderr\": 0.021262719400406974,\n \"acc_norm\": 0.8803418803418803,\n \"acc_norm_stderr\": 0.021262719400406974\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.73,\n \"acc_stderr\": 0.044619604333847394,\n \"acc_norm\": 0.73,\n \"acc_norm_stderr\": 0.044619604333847394\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8212005108556832,\n \"acc_stderr\": 0.013702643715368983,\n \"acc_norm\": 0.8212005108556832,\n 
\"acc_norm_stderr\": 0.013702643715368983\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7225433526011561,\n \"acc_stderr\": 0.024105712607754307,\n \"acc_norm\": 0.7225433526011561,\n \"acc_norm_stderr\": 0.024105712607754307\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4301675977653631,\n \"acc_stderr\": 0.01655860163604104,\n \"acc_norm\": 0.4301675977653631,\n \"acc_norm_stderr\": 0.01655860163604104\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7320261437908496,\n \"acc_stderr\": 0.025360603796242557,\n \"acc_norm\": 0.7320261437908496,\n \"acc_norm_stderr\": 0.025360603796242557\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.707395498392283,\n \"acc_stderr\": 0.02583989833487798,\n \"acc_norm\": 0.707395498392283,\n \"acc_norm_stderr\": 0.02583989833487798\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7345679012345679,\n \"acc_stderr\": 0.024569223600460845,\n \"acc_norm\": 0.7345679012345679,\n \"acc_norm_stderr\": 0.024569223600460845\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.5070921985815603,\n \"acc_stderr\": 0.02982449855912901,\n \"acc_norm\": 0.5070921985815603,\n \"acc_norm_stderr\": 0.02982449855912901\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.46153846153846156,\n \"acc_stderr\": 0.01273239828619044,\n \"acc_norm\": 0.46153846153846156,\n \"acc_norm_stderr\": 0.01273239828619044\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6838235294117647,\n \"acc_stderr\": 0.02824568739146292,\n \"acc_norm\": 0.6838235294117647,\n \"acc_norm_stderr\": 0.02824568739146292\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6683006535947712,\n \"acc_stderr\": 0.019047485239360378,\n \"acc_norm\": 0.6683006535947712,\n \"acc_norm_stderr\": 0.019047485239360378\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6909090909090909,\n \"acc_stderr\": 0.044262946482000985,\n \"acc_norm\": 0.6909090909090909,\n \"acc_norm_stderr\": 0.044262946482000985\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7387755102040816,\n \"acc_stderr\": 0.028123429335142783,\n \"acc_norm\": 0.7387755102040816,\n \"acc_norm_stderr\": 0.028123429335142783\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8308457711442786,\n \"acc_stderr\": 0.02650859065623327,\n \"acc_norm\": 0.8308457711442786,\n \"acc_norm_stderr\": 0.02650859065623327\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.83,\n \"acc_stderr\": 0.0377525168068637,\n \"acc_norm\": 0.83,\n \"acc_norm_stderr\": 0.0377525168068637\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5662650602409639,\n \"acc_stderr\": 0.03858158940685515,\n \"acc_norm\": 0.5662650602409639,\n \"acc_norm_stderr\": 0.03858158940685515\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8128654970760234,\n \"acc_stderr\": 0.029913127232368043,\n \"acc_norm\": 0.8128654970760234,\n \"acc_norm_stderr\": 0.029913127232368043\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.6193390452876377,\n \"mc1_stderr\": 0.016997627871907915,\n \"mc2\": 0.7673830386789108,\n \"mc2_stderr\": 0.013988013317866293\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8737174427782163,\n \"acc_stderr\": 0.009335559129908475\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.5686125852918877,\n \"acc_stderr\": 0.013642195352511568\n }\n}\n```", "repo_url": "https://huggingface.co/rizla/raccoon-small", "leaderboard_url": 
"https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_02T09_54_15.565869", "path": ["**/details_harness|arc:challenge|25_2024-02-02T09-54-15.565869.parquet"]}, {"split": "2024_02_02T10_01_35.686366", "path": ["**/details_harness|arc:challenge|25_2024-02-02T10-01-35.686366.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-02T10-01-35.686366.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_02T09_54_15.565869", "path": ["**/details_harness|gsm8k|5_2024-02-02T09-54-15.565869.parquet"]}, {"split": "2024_02_02T10_01_35.686366", "path": ["**/details_harness|gsm8k|5_2024-02-02T10-01-35.686366.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-02T10-01-35.686366.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_02T09_54_15.565869", "path": ["**/details_harness|hellaswag|10_2024-02-02T09-54-15.565869.parquet"]}, {"split": "2024_02_02T10_01_35.686366", "path": ["**/details_harness|hellaswag|10_2024-02-02T10-01-35.686366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-02T10-01-35.686366.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_02T09_54_15.565869", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T09-54-15.565869.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T09-54-15.565869.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T09-54-15.565869.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T09-54-15.565869.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T09-54-15.565869.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T09-54-15.565869.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T09-54-15.565869.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T09-54-15.565869.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T09-54-15.565869.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T09-54-15.565869.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T09-54-15.565869.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T09-54-15.565869.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T09-54-15.565869.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T09-54-15.565869.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T09-54-15.565869.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T09-54-15.565869.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T09-54-15.565869.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T09-54-15.565869.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T09-54-15.565869.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T09-54-15.565869.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T09-54-15.565869.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T09-54-15.565869.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T09-54-15.565869.parquet", 
"**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T09-54-15.565869.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T09-54-15.565869.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T09-54-15.565869.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T09-54-15.565869.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T09-54-15.565869.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T09-54-15.565869.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T09-54-15.565869.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T09-54-15.565869.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T09-54-15.565869.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T09-54-15.565869.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T09-54-15.565869.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T09-54-15.565869.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T09-54-15.565869.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T09-54-15.565869.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T09-54-15.565869.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T09-54-15.565869.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T09-54-15.565869.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T09-54-15.565869.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T09-54-15.565869.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T09-54-15.565869.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T09-54-15.565869.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T09-54-15.565869.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-02T09-54-15.565869.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-02T09-54-15.565869.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T09-54-15.565869.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T09-54-15.565869.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T09-54-15.565869.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T09-54-15.565869.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T09-54-15.565869.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T09-54-15.565869.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T09-54-15.565869.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T09-54-15.565869.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T09-54-15.565869.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T09-54-15.565869.parquet"]}, {"split": "2024_02_02T10_01_35.686366", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T10-01-35.686366.parquet", 
"**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T10-01-35.686366.parquet", 
"**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T10-01-35.686366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T10-01-35.686366.parquet", 
"**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T10-01-35.686366.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T10-01-35.686366.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_02T09_54_15.565869", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T09-54-15.565869.parquet"]}, {"split": "2024_02_02T10_01_35.686366", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T10-01-35.686366.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T10-01-35.686366.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_02T09_54_15.565869", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T09-54-15.565869.parquet"]}, {"split": "2024_02_02T10_01_35.686366", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T10-01-35.686366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T10-01-35.686366.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_02T09_54_15.565869", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T09-54-15.565869.parquet"]}, {"split": "2024_02_02T10_01_35.686366", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T10-01-35.686366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T10-01-35.686366.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_02T09_54_15.565869", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T09-54-15.565869.parquet"]}, {"split": "2024_02_02T10_01_35.686366", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T10-01-35.686366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T10-01-35.686366.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_02T09_54_15.565869", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T09-54-15.565869.parquet"]}, {"split": "2024_02_02T10_01_35.686366", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T10-01-35.686366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T10-01-35.686366.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_02T09_54_15.565869", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T09-54-15.565869.parquet"]}, {"split": "2024_02_02T10_01_35.686366", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T10-01-35.686366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T10-01-35.686366.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_02T09_54_15.565869", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T09-54-15.565869.parquet"]}, {"split": "2024_02_02T10_01_35.686366", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T10-01-35.686366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T10-01-35.686366.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_02_02T09_54_15.565869", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T09-54-15.565869.parquet"]}, {"split": "2024_02_02T10_01_35.686366", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T10-01-35.686366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T10-01-35.686366.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_02T09_54_15.565869", "path": 
["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T09-54-15.565869.parquet"]}, {"split": "2024_02_02T10_01_35.686366", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T10-01-35.686366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T10-01-35.686366.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_02T09_54_15.565869", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T09-54-15.565869.parquet"]}, {"split": "2024_02_02T10_01_35.686366", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T10-01-35.686366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T10-01-35.686366.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_02T09_54_15.565869", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T09-54-15.565869.parquet"]}, {"split": "2024_02_02T10_01_35.686366", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T10-01-35.686366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T10-01-35.686366.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_02T09_54_15.565869", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T09-54-15.565869.parquet"]}, {"split": "2024_02_02T10_01_35.686366", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T10-01-35.686366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T10-01-35.686366.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_02T09_54_15.565869", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T09-54-15.565869.parquet"]}, {"split": "2024_02_02T10_01_35.686366", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T10-01-35.686366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T10-01-35.686366.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_02T09_54_15.565869", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T09-54-15.565869.parquet"]}, {"split": "2024_02_02T10_01_35.686366", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T10-01-35.686366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T10-01-35.686366.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_02T09_54_15.565869", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T09-54-15.565869.parquet"]}, {"split": "2024_02_02T10_01_35.686366", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T10-01-35.686366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T10-01-35.686366.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_02T09_54_15.565869", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T09-54-15.565869.parquet"]}, {"split": "2024_02_02T10_01_35.686366", "path": 
["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T10-01-35.686366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T10-01-35.686366.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_02T09_54_15.565869", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T09-54-15.565869.parquet"]}, {"split": "2024_02_02T10_01_35.686366", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T10-01-35.686366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T10-01-35.686366.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_02T09_54_15.565869", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T09-54-15.565869.parquet"]}, {"split": "2024_02_02T10_01_35.686366", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T10-01-35.686366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T10-01-35.686366.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_02T09_54_15.565869", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T09-54-15.565869.parquet"]}, {"split": "2024_02_02T10_01_35.686366", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T10-01-35.686366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T10-01-35.686366.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_02T09_54_15.565869", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T09-54-15.565869.parquet"]}, {"split": "2024_02_02T10_01_35.686366", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T10-01-35.686366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T10-01-35.686366.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_02T09_54_15.565869", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T09-54-15.565869.parquet"]}, {"split": "2024_02_02T10_01_35.686366", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T10-01-35.686366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T10-01-35.686366.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_02T09_54_15.565869", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T09-54-15.565869.parquet"]}, {"split": "2024_02_02T10_01_35.686366", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T10-01-35.686366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T10-01-35.686366.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_02T09_54_15.565869", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T09-54-15.565869.parquet"]}, {"split": "2024_02_02T10_01_35.686366", "path": 
["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T10-01-35.686366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T10-01-35.686366.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_02T09_54_15.565869", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T09-54-15.565869.parquet"]}, {"split": "2024_02_02T10_01_35.686366", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T10-01-35.686366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T10-01-35.686366.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_02T09_54_15.565869", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T09-54-15.565869.parquet"]}, {"split": "2024_02_02T10_01_35.686366", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T10-01-35.686366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T10-01-35.686366.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_02T09_54_15.565869", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T09-54-15.565869.parquet"]}, {"split": "2024_02_02T10_01_35.686366", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T10-01-35.686366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T10-01-35.686366.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_02T09_54_15.565869", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T09-54-15.565869.parquet"]}, {"split": "2024_02_02T10_01_35.686366", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T10-01-35.686366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T10-01-35.686366.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_02T09_54_15.565869", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T09-54-15.565869.parquet"]}, {"split": "2024_02_02T10_01_35.686366", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T10-01-35.686366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T10-01-35.686366.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_02T09_54_15.565869", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T09-54-15.565869.parquet"]}, {"split": "2024_02_02T10_01_35.686366", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T10-01-35.686366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T10-01-35.686366.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_02T09_54_15.565869", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T09-54-15.565869.parquet"]}, 
{"split": "2024_02_02T10_01_35.686366", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T10-01-35.686366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T10-01-35.686366.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_02T09_54_15.565869", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T09-54-15.565869.parquet"]}, {"split": "2024_02_02T10_01_35.686366", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T10-01-35.686366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T10-01-35.686366.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_02T09_54_15.565869", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T09-54-15.565869.parquet"]}, {"split": "2024_02_02T10_01_35.686366", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T10-01-35.686366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T10-01-35.686366.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_02T09_54_15.565869", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T09-54-15.565869.parquet"]}, {"split": "2024_02_02T10_01_35.686366", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T10-01-35.686366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T10-01-35.686366.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_02T09_54_15.565869", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T09-54-15.565869.parquet"]}, {"split": "2024_02_02T10_01_35.686366", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T10-01-35.686366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T10-01-35.686366.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_02T09_54_15.565869", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T09-54-15.565869.parquet"]}, {"split": "2024_02_02T10_01_35.686366", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T10-01-35.686366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T10-01-35.686366.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_02T09_54_15.565869", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T09-54-15.565869.parquet"]}, {"split": "2024_02_02T10_01_35.686366", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T10-01-35.686366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T10-01-35.686366.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_02T09_54_15.565869", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T09-54-15.565869.parquet"]}, {"split": "2024_02_02T10_01_35.686366", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T10-01-35.686366.parquet"]}, {"split": 
"latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T10-01-35.686366.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_02_02T09_54_15.565869", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T09-54-15.565869.parquet"]}, {"split": "2024_02_02T10_01_35.686366", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T10-01-35.686366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T10-01-35.686366.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_02T09_54_15.565869", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T09-54-15.565869.parquet"]}, {"split": "2024_02_02T10_01_35.686366", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T10-01-35.686366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T10-01-35.686366.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_02T09_54_15.565869", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T09-54-15.565869.parquet"]}, {"split": "2024_02_02T10_01_35.686366", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T10-01-35.686366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T10-01-35.686366.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_02T09_54_15.565869", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T09-54-15.565869.parquet"]}, {"split": "2024_02_02T10_01_35.686366", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T10-01-35.686366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T10-01-35.686366.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_02T09_54_15.565869", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T09-54-15.565869.parquet"]}, {"split": "2024_02_02T10_01_35.686366", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T10-01-35.686366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T10-01-35.686366.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_02T09_54_15.565869", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T09-54-15.565869.parquet"]}, {"split": "2024_02_02T10_01_35.686366", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T10-01-35.686366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T10-01-35.686366.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_02T09_54_15.565869", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T09-54-15.565869.parquet"]}, {"split": "2024_02_02T10_01_35.686366", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T10-01-35.686366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T10-01-35.686366.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_02T09_54_15.565869", "path": 
["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T09-54-15.565869.parquet"]}, {"split": "2024_02_02T10_01_35.686366", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T10-01-35.686366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T10-01-35.686366.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_02T09_54_15.565869", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T09-54-15.565869.parquet"]}, {"split": "2024_02_02T10_01_35.686366", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T10-01-35.686366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T10-01-35.686366.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_02T09_54_15.565869", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T09-54-15.565869.parquet"]}, {"split": "2024_02_02T10_01_35.686366", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T10-01-35.686366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T10-01-35.686366.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_02T09_54_15.565869", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T09-54-15.565869.parquet"]}, {"split": "2024_02_02T10_01_35.686366", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T10-01-35.686366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T10-01-35.686366.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_02T09_54_15.565869", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T09-54-15.565869.parquet"]}, {"split": "2024_02_02T10_01_35.686366", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T10-01-35.686366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T10-01-35.686366.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_02T09_54_15.565869", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T09-54-15.565869.parquet"]}, {"split": "2024_02_02T10_01_35.686366", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T10-01-35.686366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T10-01-35.686366.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_02T09_54_15.565869", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T09-54-15.565869.parquet"]}, {"split": "2024_02_02T10_01_35.686366", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T10-01-35.686366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T10-01-35.686366.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_02T09_54_15.565869", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T09-54-15.565869.parquet"]}, {"split": "2024_02_02T10_01_35.686366", "path": 
["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T10-01-35.686366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T10-01-35.686366.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_02T09_54_15.565869", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T09-54-15.565869.parquet"]}, {"split": "2024_02_02T10_01_35.686366", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T10-01-35.686366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T10-01-35.686366.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_02T09_54_15.565869", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T09-54-15.565869.parquet"]}, {"split": "2024_02_02T10_01_35.686366", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T10-01-35.686366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T10-01-35.686366.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_02T09_54_15.565869", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T09-54-15.565869.parquet"]}, {"split": "2024_02_02T10_01_35.686366", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T10-01-35.686366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T10-01-35.686366.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_02T09_54_15.565869", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T09-54-15.565869.parquet"]}, {"split": "2024_02_02T10_01_35.686366", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T10-01-35.686366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T10-01-35.686366.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_02T09_54_15.565869", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T09-54-15.565869.parquet"]}, {"split": "2024_02_02T10_01_35.686366", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T10-01-35.686366.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T10-01-35.686366.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_02T09_54_15.565869", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T09-54-15.565869.parquet"]}, {"split": "2024_02_02T10_01_35.686366", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T10-01-35.686366.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T10-01-35.686366.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_02T09_54_15.565869", "path": ["**/details_harness|winogrande|5_2024-02-02T09-54-15.565869.parquet"]}, {"split": "2024_02_02T10_01_35.686366", "path": ["**/details_harness|winogrande|5_2024-02-02T10-01-35.686366.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-02-02T10-01-35.686366.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_02_02T09_54_15.565869", "path": ["results_2024-02-02T09-54-15.565869.parquet"]}, {"split": "2024_02_02T10_01_35.686366", "path": 
["results_2024-02-02T10-01-35.686366.parquet"]}, {"split": "latest", "path": ["results_2024-02-02T10-01-35.686366.parquet"]}]}]}
2024-02-02T10:04:01+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of rizla/raccoon-small Dataset automatically created during the evaluation run of model rizla/raccoon-small on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following (see the sketch below): ## Latest results These are the latest results from run 2024-02-02T10:01:35.686366 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
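The load snippet referenced just above was stripped from this card text; a minimal sketch of it follows, assuming the `open-llm-leaderboard/details_<org>__<model>` repository naming pattern used by the other evaluation-run cards in this document (the exact repository name for rizla/raccoon-small is an assumption):

```python
from datasets import load_dataset

# Assumed repository name, following the details_<org>__<model> naming
# pattern of the other Open LLM Leaderboard evaluation-run datasets.
data = load_dataset(
    "open-llm-leaderboard/details_rizla__raccoon-small",
    "harness_winogrande_5",
    split="train",
)

# A specific run can also be loaded through its timestamped split; the
# split names are listed in this dataset's configuration metadata above.
run = load_dataset(
    "open-llm-leaderboard/details_rizla__raccoon-small",
    "harness_winogrande_5",
    split="2024_02_02T10_01_35.686366",
)
```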
[ "# Dataset Card for Evaluation run of rizla/raccoon-small\n\n\n\nDataset automatically created during the evaluation run of model rizla/raccoon-small on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T10:01:35.686366(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of rizla/raccoon-small\n\n\n\nDataset automatically created during the evaluation run of model rizla/raccoon-small on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T10:01:35.686366(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
bb50a099a4df8c2b41d22e1c26e98fdce64197c1
# Dataset Card for Evaluation run of OpenBuddy/openbuddy-mixtral-7bx8-v17.2-32k <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [OpenBuddy/openbuddy-mixtral-7bx8-v17.2-32k](https://huggingface.co/OpenBuddy/openbuddy-mixtral-7bx8-v17.2-32k) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_OpenBuddy__openbuddy-mixtral-7bx8-v17.2-32k", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-02-02T09:54:33.456360](https://huggingface.co/datasets/open-llm-leaderboard/details_OpenBuddy__openbuddy-mixtral-7bx8-v17.2-32k/blob/main/results_2024-02-02T09-54-33.456360.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.18431258568628095, "acc_stderr": 0.027127844586126878, "acc_norm": 0.18293847355265097, "acc_norm_stderr": 0.027828083788311676, "mc1": 0.2521419828641371, "mc1_stderr": 0.015201522246299953, "mc2": NaN, "mc2_stderr": NaN }, "harness|arc:challenge|25": { "acc": 0.2696245733788396, "acc_stderr": 0.012968040686869152, "acc_norm": 0.33532423208191126, "acc_norm_stderr": 0.013796182947785564 }, "harness|hellaswag|10": { "acc": 0.2717586138219478, "acc_stderr": 0.004439569447407354, "acc_norm": 0.31358295160326627, "acc_norm_stderr": 0.004630008293925626 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.17037037037037037, "acc_stderr": 0.03247781185995593, "acc_norm": 0.17037037037037037, "acc_norm_stderr": 0.03247781185995593 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.13157894736842105, "acc_stderr": 0.027508689533549905, "acc_norm": 0.13157894736842105, "acc_norm_stderr": 0.027508689533549905 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.23, "acc_stderr": 0.04229525846816506, "acc_norm": 0.23, "acc_norm_stderr": 0.04229525846816506 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.16226415094339622, "acc_stderr": 0.022691482872035384, "acc_norm": 0.16226415094339622, "acc_norm_stderr": 0.022691482872035384 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.1527777777777778, "acc_stderr": 0.03008574324856568, "acc_norm": 0.1527777777777778, "acc_norm_stderr": 0.03008574324856568 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.17, "acc_stderr": 0.03775251680686371, "acc_norm": 0.17, "acc_norm_stderr": 0.03775251680686371 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.11, "acc_stderr": 0.03144660377352203, 
"acc_norm": 0.11, "acc_norm_stderr": 0.03144660377352203 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.21, "acc_stderr": 0.04093601807403325, "acc_norm": 0.21, "acc_norm_stderr": 0.04093601807403325 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.1329479768786127, "acc_stderr": 0.02588804297966229, "acc_norm": 0.1329479768786127, "acc_norm_stderr": 0.02588804297966229 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.19607843137254902, "acc_stderr": 0.03950581861179961, "acc_norm": 0.19607843137254902, "acc_norm_stderr": 0.03950581861179961 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.2170212765957447, "acc_stderr": 0.026947483121496245, "acc_norm": 0.2170212765957447, "acc_norm_stderr": 0.026947483121496245 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.20175438596491227, "acc_stderr": 0.03775205013583639, "acc_norm": 0.20175438596491227, "acc_norm_stderr": 0.03775205013583639 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.2, "acc_stderr": 0.03333333333333331, "acc_norm": 0.2, "acc_norm_stderr": 0.03333333333333331 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.22486772486772486, "acc_stderr": 0.02150209607822914, "acc_norm": 0.22486772486772486, "acc_norm_stderr": 0.02150209607822914 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.1984126984126984, "acc_stderr": 0.035670166752768635, "acc_norm": 0.1984126984126984, "acc_norm_stderr": 0.035670166752768635 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.16, "acc_stderr": 0.03684529491774709, "acc_norm": 0.16, "acc_norm_stderr": 0.03684529491774709 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.13225806451612904, "acc_stderr": 0.019272015434846485, "acc_norm": 0.13225806451612904, "acc_norm_stderr": 0.019272015434846485 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.1625615763546798, "acc_stderr": 0.025960300064605608, "acc_norm": 0.1625615763546798, "acc_norm_stderr": 0.025960300064605608 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.13333333333333333, "acc_stderr": 0.026544435312706463, "acc_norm": 0.13333333333333333, "acc_norm_stderr": 0.026544435312706463 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.15151515151515152, "acc_stderr": 0.02554565042660359, "acc_norm": 0.15151515151515152, "acc_norm_stderr": 0.02554565042660359 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.16062176165803108, "acc_stderr": 0.026499057701397464, "acc_norm": 0.16062176165803108, "acc_norm_stderr": 0.026499057701397464 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.16666666666666666, "acc_stderr": 0.018895524482604946, "acc_norm": 0.16666666666666666, "acc_norm_stderr": 0.018895524482604946 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.23333333333333334, "acc_stderr": 0.02578787422095932, "acc_norm": 0.23333333333333334, "acc_norm_stderr": 0.02578787422095932 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.15966386554621848, "acc_stderr": 0.023793353997528802, "acc_norm": 0.15966386554621848, "acc_norm_stderr": 0.023793353997528802 }, "harness|hendrycksTest-high_school_physics|5": { 
"acc": 0.152317880794702, "acc_stderr": 0.029339068831498692, "acc_norm": 0.152317880794702, "acc_norm_stderr": 0.029339068831498692 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.11559633027522936, "acc_stderr": 0.013708749534172641, "acc_norm": 0.11559633027522936, "acc_norm_stderr": 0.013708749534172641 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.1388888888888889, "acc_stderr": 0.023585447368900135, "acc_norm": 0.1388888888888889, "acc_norm_stderr": 0.023585447368900135 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.14215686274509803, "acc_stderr": 0.02450980392156862, "acc_norm": 0.14215686274509803, "acc_norm_stderr": 0.02450980392156862 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.12236286919831224, "acc_stderr": 0.02133174182974679, "acc_norm": 0.12236286919831224, "acc_norm_stderr": 0.02133174182974679 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.23318385650224216, "acc_stderr": 0.028380391147094706, "acc_norm": 0.23318385650224216, "acc_norm_stderr": 0.028380391147094706 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.17557251908396945, "acc_stderr": 0.03336820338476075, "acc_norm": 0.17557251908396945, "acc_norm_stderr": 0.03336820338476075 }, "harness|hendrycksTest-international_law|5": { "acc": 0.1652892561983471, "acc_stderr": 0.03390780612972776, "acc_norm": 0.1652892561983471, "acc_norm_stderr": 0.03390780612972776 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.12962962962962962, "acc_stderr": 0.032472243899179465, "acc_norm": 0.12962962962962962, "acc_norm_stderr": 0.032472243899179465 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.2085889570552147, "acc_stderr": 0.031921934489347235, "acc_norm": 0.2085889570552147, "acc_norm_stderr": 0.031921934489347235 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.25, "acc_stderr": 0.04109974682633932, "acc_norm": 0.25, "acc_norm_stderr": 0.04109974682633932 }, "harness|hendrycksTest-management|5": { "acc": 0.14563106796116504, "acc_stderr": 0.03492606476623791, "acc_norm": 0.14563106796116504, "acc_norm_stderr": 0.03492606476623791 }, "harness|hendrycksTest-marketing|5": { "acc": 0.16666666666666666, "acc_stderr": 0.024414947304543674, "acc_norm": 0.16666666666666666, "acc_norm_stderr": 0.024414947304543674 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.11, "acc_stderr": 0.03144660377352203, "acc_norm": 0.11, "acc_norm_stderr": 0.03144660377352203 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.17879948914431673, "acc_stderr": 0.013702643715368988, "acc_norm": 0.17879948914431673, "acc_norm_stderr": 0.013702643715368988 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.1994219653179191, "acc_stderr": 0.021511900654252535, "acc_norm": 0.1994219653179191, "acc_norm_stderr": 0.021511900654252535 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.22569832402234638, "acc_stderr": 0.013981395058455057, "acc_norm": 0.22569832402234638, "acc_norm_stderr": 0.013981395058455057 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.16013071895424835, "acc_stderr": 0.020998740930362303, "acc_norm": 0.16013071895424835, "acc_norm_stderr": 0.020998740930362303 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.14790996784565916, "acc_stderr": 0.0201632538062841, "acc_norm": 0.14790996784565916, "acc_norm_stderr": 0.0201632538062841 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.13580246913580246, "acc_stderr": 0.019061588181505378, "acc_norm": 0.13580246913580246, "acc_norm_stderr": 
0.019061588181505378 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.1702127659574468, "acc_stderr": 0.022419518398146945, "acc_norm": 0.1702127659574468, "acc_norm_stderr": 0.022419518398146945 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.19361147327249023, "acc_stderr": 0.010091753580970883, "acc_norm": 0.19361147327249023, "acc_norm_stderr": 0.010091753580970883 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.17279411764705882, "acc_stderr": 0.022966067585581795, "acc_norm": 0.17279411764705882, "acc_norm_stderr": 0.022966067585581795 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.18627450980392157, "acc_stderr": 0.015750526284363377, "acc_norm": 0.18627450980392157, "acc_norm_stderr": 0.015750526284363377 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.22727272727272727, "acc_stderr": 0.04013964554072775, "acc_norm": 0.22727272727272727, "acc_norm_stderr": 0.04013964554072775 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.1306122448979592, "acc_stderr": 0.021572664699009297, "acc_norm": 0.1306122448979592, "acc_norm_stderr": 0.021572664699009297 }, "harness|hendrycksTest-sociology|5": { "acc": 0.1890547263681592, "acc_stderr": 0.02768691358801301, "acc_norm": 0.1890547263681592, "acc_norm_stderr": 0.02768691358801301 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.18, "acc_stderr": 0.03861229196653696, "acc_norm": 0.18, "acc_norm_stderr": 0.03861229196653696 }, "harness|hendrycksTest-virology|5": { "acc": 0.26506024096385544, "acc_stderr": 0.03436024037944967, "acc_norm": 0.26506024096385544, "acc_norm_stderr": 0.03436024037944967 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.15789473684210525, "acc_stderr": 0.027966785859160896, "acc_norm": 0.15789473684210525, "acc_norm_stderr": 0.027966785859160896 }, "harness|truthfulqa:mc|0": { "mc1": 0.2521419828641371, "mc1_stderr": 0.015201522246299953, "mc2": NaN, "mc2_stderr": NaN }, "harness|winogrande|5": { "acc": 0.5572217837411207, "acc_stderr": 0.013960157350784994 }, "harness|gsm8k|5": { "acc": 0.0, "acc_stderr": 0.0 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_OpenBuddy__openbuddy-mixtral-7bx8-v17.2-32k
[ "region:us" ]
2024-02-02T09:56:53+00:00
{"pretty_name": "Evaluation run of OpenBuddy/openbuddy-mixtral-7bx8-v17.2-32k", "dataset_summary": "Dataset automatically created during the evaluation run of model [OpenBuddy/openbuddy-mixtral-7bx8-v17.2-32k](https://huggingface.co/OpenBuddy/openbuddy-mixtral-7bx8-v17.2-32k) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_OpenBuddy__openbuddy-mixtral-7bx8-v17.2-32k\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-02T09:54:33.456360](https://huggingface.co/datasets/open-llm-leaderboard/details_OpenBuddy__openbuddy-mixtral-7bx8-v17.2-32k/blob/main/results_2024-02-02T09-54-33.456360.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.18431258568628095,\n \"acc_stderr\": 0.027127844586126878,\n \"acc_norm\": 0.18293847355265097,\n \"acc_norm_stderr\": 0.027828083788311676,\n \"mc1\": 0.2521419828641371,\n \"mc1_stderr\": 0.015201522246299953,\n \"mc2\": NaN,\n \"mc2_stderr\": NaN\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.2696245733788396,\n \"acc_stderr\": 0.012968040686869152,\n \"acc_norm\": 0.33532423208191126,\n \"acc_norm_stderr\": 0.013796182947785564\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.2717586138219478,\n \"acc_stderr\": 0.004439569447407354,\n \"acc_norm\": 0.31358295160326627,\n \"acc_norm_stderr\": 0.004630008293925626\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.17037037037037037,\n \"acc_stderr\": 0.03247781185995593,\n \"acc_norm\": 0.17037037037037037,\n \"acc_norm_stderr\": 0.03247781185995593\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.13157894736842105,\n \"acc_stderr\": 0.027508689533549905,\n \"acc_norm\": 0.13157894736842105,\n \"acc_norm_stderr\": 0.027508689533549905\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.23,\n \"acc_stderr\": 0.04229525846816506,\n \"acc_norm\": 0.23,\n \"acc_norm_stderr\": 0.04229525846816506\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.16226415094339622,\n \"acc_stderr\": 0.022691482872035384,\n \"acc_norm\": 0.16226415094339622,\n \"acc_norm_stderr\": 0.022691482872035384\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.1527777777777778,\n \"acc_stderr\": 0.03008574324856568,\n \"acc_norm\": 0.1527777777777778,\n \"acc_norm_stderr\": 0.03008574324856568\n },\n 
\"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.17,\n \"acc_stderr\": 0.03775251680686371,\n \"acc_norm\": 0.17,\n \"acc_norm_stderr\": 0.03775251680686371\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.11,\n \"acc_stderr\": 0.03144660377352203,\n \"acc_norm\": 0.11,\n \"acc_norm_stderr\": 0.03144660377352203\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.21,\n \"acc_stderr\": 0.04093601807403325,\n \"acc_norm\": 0.21,\n \"acc_norm_stderr\": 0.04093601807403325\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.1329479768786127,\n \"acc_stderr\": 0.02588804297966229,\n \"acc_norm\": 0.1329479768786127,\n \"acc_norm_stderr\": 0.02588804297966229\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.19607843137254902,\n \"acc_stderr\": 0.03950581861179961,\n \"acc_norm\": 0.19607843137254902,\n \"acc_norm_stderr\": 0.03950581861179961\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.2170212765957447,\n \"acc_stderr\": 0.026947483121496245,\n \"acc_norm\": 0.2170212765957447,\n \"acc_norm_stderr\": 0.026947483121496245\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.20175438596491227,\n \"acc_stderr\": 0.03775205013583639,\n \"acc_norm\": 0.20175438596491227,\n \"acc_norm_stderr\": 0.03775205013583639\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.2,\n \"acc_stderr\": 0.03333333333333331,\n \"acc_norm\": 0.2,\n \"acc_norm_stderr\": 0.03333333333333331\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.22486772486772486,\n \"acc_stderr\": 0.02150209607822914,\n \"acc_norm\": 0.22486772486772486,\n \"acc_norm_stderr\": 0.02150209607822914\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.1984126984126984,\n \"acc_stderr\": 0.035670166752768635,\n \"acc_norm\": 0.1984126984126984,\n \"acc_norm_stderr\": 0.035670166752768635\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.16,\n \"acc_stderr\": 0.03684529491774709,\n \"acc_norm\": 0.16,\n \"acc_norm_stderr\": 0.03684529491774709\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.13225806451612904,\n \"acc_stderr\": 0.019272015434846485,\n \"acc_norm\": 0.13225806451612904,\n \"acc_norm_stderr\": 0.019272015434846485\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.1625615763546798,\n \"acc_stderr\": 0.025960300064605608,\n \"acc_norm\": 0.1625615763546798,\n \"acc_norm_stderr\": 0.025960300064605608\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.13333333333333333,\n \"acc_stderr\": 0.026544435312706463,\n \"acc_norm\": 0.13333333333333333,\n \"acc_norm_stderr\": 0.026544435312706463\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.15151515151515152,\n \"acc_stderr\": 0.02554565042660359,\n \"acc_norm\": 0.15151515151515152,\n \"acc_norm_stderr\": 0.02554565042660359\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.16062176165803108,\n \"acc_stderr\": 0.026499057701397464,\n \"acc_norm\": 0.16062176165803108,\n \"acc_norm_stderr\": 
0.026499057701397464\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.16666666666666666,\n \"acc_stderr\": 0.018895524482604946,\n \"acc_norm\": 0.16666666666666666,\n \"acc_norm_stderr\": 0.018895524482604946\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.23333333333333334,\n \"acc_stderr\": 0.02578787422095932,\n \"acc_norm\": 0.23333333333333334,\n \"acc_norm_stderr\": 0.02578787422095932\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.15966386554621848,\n \"acc_stderr\": 0.023793353997528802,\n \"acc_norm\": 0.15966386554621848,\n \"acc_norm_stderr\": 0.023793353997528802\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.152317880794702,\n \"acc_stderr\": 0.029339068831498692,\n \"acc_norm\": 0.152317880794702,\n \"acc_norm_stderr\": 0.029339068831498692\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.11559633027522936,\n \"acc_stderr\": 0.013708749534172641,\n \"acc_norm\": 0.11559633027522936,\n \"acc_norm_stderr\": 0.013708749534172641\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.1388888888888889,\n \"acc_stderr\": 0.023585447368900135,\n \"acc_norm\": 0.1388888888888889,\n \"acc_norm_stderr\": 0.023585447368900135\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.14215686274509803,\n \"acc_stderr\": 0.02450980392156862,\n \"acc_norm\": 0.14215686274509803,\n \"acc_norm_stderr\": 0.02450980392156862\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.12236286919831224,\n \"acc_stderr\": 0.02133174182974679,\n \"acc_norm\": 0.12236286919831224,\n \"acc_norm_stderr\": 0.02133174182974679\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.23318385650224216,\n \"acc_stderr\": 0.028380391147094706,\n \"acc_norm\": 0.23318385650224216,\n \"acc_norm_stderr\": 0.028380391147094706\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.17557251908396945,\n \"acc_stderr\": 0.03336820338476075,\n \"acc_norm\": 0.17557251908396945,\n \"acc_norm_stderr\": 0.03336820338476075\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.1652892561983471,\n \"acc_stderr\": 0.03390780612972776,\n \"acc_norm\": 0.1652892561983471,\n \"acc_norm_stderr\": 0.03390780612972776\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.12962962962962962,\n \"acc_stderr\": 0.032472243899179465,\n \"acc_norm\": 0.12962962962962962,\n \"acc_norm_stderr\": 0.032472243899179465\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.2085889570552147,\n \"acc_stderr\": 0.031921934489347235,\n \"acc_norm\": 0.2085889570552147,\n \"acc_norm_stderr\": 0.031921934489347235\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04109974682633932,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04109974682633932\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.14563106796116504,\n \"acc_stderr\": 0.03492606476623791,\n \"acc_norm\": 0.14563106796116504,\n \"acc_norm_stderr\": 0.03492606476623791\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.16666666666666666,\n \"acc_stderr\": 0.024414947304543674,\n \"acc_norm\": 0.16666666666666666,\n \"acc_norm_stderr\": 0.024414947304543674\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.11,\n \"acc_stderr\": 0.03144660377352203,\n \"acc_norm\": 0.11,\n \"acc_norm_stderr\": 0.03144660377352203\n },\n 
\"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.17879948914431673,\n \"acc_stderr\": 0.013702643715368988,\n \"acc_norm\": 0.17879948914431673,\n \"acc_norm_stderr\": 0.013702643715368988\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.1994219653179191,\n \"acc_stderr\": 0.021511900654252535,\n \"acc_norm\": 0.1994219653179191,\n \"acc_norm_stderr\": 0.021511900654252535\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.22569832402234638,\n \"acc_stderr\": 0.013981395058455057,\n \"acc_norm\": 0.22569832402234638,\n \"acc_norm_stderr\": 0.013981395058455057\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.16013071895424835,\n \"acc_stderr\": 0.020998740930362303,\n \"acc_norm\": 0.16013071895424835,\n \"acc_norm_stderr\": 0.020998740930362303\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.14790996784565916,\n \"acc_stderr\": 0.0201632538062841,\n \"acc_norm\": 0.14790996784565916,\n \"acc_norm_stderr\": 0.0201632538062841\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.13580246913580246,\n \"acc_stderr\": 0.019061588181505378,\n \"acc_norm\": 0.13580246913580246,\n \"acc_norm_stderr\": 0.019061588181505378\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.1702127659574468,\n \"acc_stderr\": 0.022419518398146945,\n \"acc_norm\": 0.1702127659574468,\n \"acc_norm_stderr\": 0.022419518398146945\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.19361147327249023,\n \"acc_stderr\": 0.010091753580970883,\n \"acc_norm\": 0.19361147327249023,\n \"acc_norm_stderr\": 0.010091753580970883\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.17279411764705882,\n \"acc_stderr\": 0.022966067585581795,\n \"acc_norm\": 0.17279411764705882,\n \"acc_norm_stderr\": 0.022966067585581795\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.18627450980392157,\n \"acc_stderr\": 0.015750526284363377,\n \"acc_norm\": 0.18627450980392157,\n \"acc_norm_stderr\": 0.015750526284363377\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.22727272727272727,\n \"acc_stderr\": 0.04013964554072775,\n \"acc_norm\": 0.22727272727272727,\n \"acc_norm_stderr\": 0.04013964554072775\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.1306122448979592,\n \"acc_stderr\": 0.021572664699009297,\n \"acc_norm\": 0.1306122448979592,\n \"acc_norm_stderr\": 0.021572664699009297\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.1890547263681592,\n \"acc_stderr\": 0.02768691358801301,\n \"acc_norm\": 0.1890547263681592,\n \"acc_norm_stderr\": 0.02768691358801301\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.18,\n \"acc_stderr\": 0.03861229196653696,\n \"acc_norm\": 0.18,\n \"acc_norm_stderr\": 0.03861229196653696\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.26506024096385544,\n \"acc_stderr\": 0.03436024037944967,\n \"acc_norm\": 0.26506024096385544,\n \"acc_norm_stderr\": 0.03436024037944967\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.15789473684210525,\n \"acc_stderr\": 0.027966785859160896,\n \"acc_norm\": 0.15789473684210525,\n \"acc_norm_stderr\": 0.027966785859160896\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.2521419828641371,\n \"mc1_stderr\": 0.015201522246299953,\n \"mc2\": NaN,\n \"mc2_stderr\": NaN\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.5572217837411207,\n \"acc_stderr\": 0.013960157350784994\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0,\n 
\"acc_stderr\": 0.0\n }\n}\n```", "repo_url": "https://huggingface.co/OpenBuddy/openbuddy-mixtral-7bx8-v17.2-32k", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_02T09_54_33.456360", "path": ["**/details_harness|arc:challenge|25_2024-02-02T09-54-33.456360.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-02T09-54-33.456360.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_02T09_54_33.456360", "path": ["**/details_harness|gsm8k|5_2024-02-02T09-54-33.456360.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-02T09-54-33.456360.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_02T09_54_33.456360", "path": ["**/details_harness|hellaswag|10_2024-02-02T09-54-33.456360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-02T09-54-33.456360.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_02T09_54_33.456360", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T09-54-33.456360.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T09-54-33.456360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T09-54-33.456360.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T09-54-33.456360.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T09-54-33.456360.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T09-54-33.456360.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_02T09_54_33.456360", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T09-54-33.456360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T09-54-33.456360.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_02T09_54_33.456360", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T09-54-33.456360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T09-54-33.456360.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_02T09_54_33.456360", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T09-54-33.456360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T09-54-33.456360.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_02T09_54_33.456360", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T09-54-33.456360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T09-54-33.456360.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_02T09_54_33.456360", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T09-54-33.456360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T09-54-33.456360.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_02T09_54_33.456360", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T09-54-33.456360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T09-54-33.456360.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_02T09_54_33.456360", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T09-54-33.456360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T09-54-33.456360.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_02_02T09_54_33.456360", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T09-54-33.456360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T09-54-33.456360.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_02T09_54_33.456360", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T09-54-33.456360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T09-54-33.456360.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_02T09_54_33.456360", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T09-54-33.456360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T09-54-33.456360.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_02T09_54_33.456360", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T09-54-33.456360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T09-54-33.456360.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_02T09_54_33.456360", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T09-54-33.456360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T09-54-33.456360.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_02T09_54_33.456360", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T09-54-33.456360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T09-54-33.456360.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_02T09_54_33.456360", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T09-54-33.456360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T09-54-33.456360.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_02T09_54_33.456360", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T09-54-33.456360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T09-54-33.456360.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_02T09_54_33.456360", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T09-54-33.456360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T09-54-33.456360.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_02T09_54_33.456360", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T09-54-33.456360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T09-54-33.456360.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_02T09_54_33.456360", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T09-54-33.456360.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T09-54-33.456360.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_02T09_54_33.456360", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T09-54-33.456360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T09-54-33.456360.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_02T09_54_33.456360", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T09-54-33.456360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T09-54-33.456360.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_02T09_54_33.456360", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T09-54-33.456360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T09-54-33.456360.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_02T09_54_33.456360", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T09-54-33.456360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T09-54-33.456360.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_02T09_54_33.456360", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T09-54-33.456360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T09-54-33.456360.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_02T09_54_33.456360", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T09-54-33.456360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T09-54-33.456360.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_02T09_54_33.456360", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T09-54-33.456360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T09-54-33.456360.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_02T09_54_33.456360", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T09-54-33.456360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T09-54-33.456360.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_02T09_54_33.456360", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T09-54-33.456360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T09-54-33.456360.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_02T09_54_33.456360", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T09-54-33.456360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T09-54-33.456360.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_02T09_54_33.456360", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T09-54-33.456360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T09-54-33.456360.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_02T09_54_33.456360", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T09-54-33.456360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T09-54-33.456360.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_02T09_54_33.456360", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T09-54-33.456360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T09-54-33.456360.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_02T09_54_33.456360", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T09-54-33.456360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T09-54-33.456360.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_02T09_54_33.456360", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T09-54-33.456360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T09-54-33.456360.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_02T09_54_33.456360", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T09-54-33.456360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T09-54-33.456360.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_02T09_54_33.456360", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T09-54-33.456360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T09-54-33.456360.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_02T09_54_33.456360", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T09-54-33.456360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T09-54-33.456360.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_02T09_54_33.456360", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T09-54-33.456360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T09-54-33.456360.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_02_02T09_54_33.456360", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T09-54-33.456360.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T09-54-33.456360.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_02T09_54_33.456360", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T09-54-33.456360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T09-54-33.456360.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_02T09_54_33.456360", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T09-54-33.456360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T09-54-33.456360.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_02T09_54_33.456360", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T09-54-33.456360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T09-54-33.456360.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_02T09_54_33.456360", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T09-54-33.456360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T09-54-33.456360.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_02T09_54_33.456360", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T09-54-33.456360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T09-54-33.456360.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_02T09_54_33.456360", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T09-54-33.456360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T09-54-33.456360.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_02T09_54_33.456360", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T09-54-33.456360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T09-54-33.456360.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_02T09_54_33.456360", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T09-54-33.456360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T09-54-33.456360.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_02T09_54_33.456360", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T09-54-33.456360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T09-54-33.456360.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_02T09_54_33.456360", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T09-54-33.456360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T09-54-33.456360.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_02T09_54_33.456360", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T09-54-33.456360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T09-54-33.456360.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_02T09_54_33.456360", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T09-54-33.456360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T09-54-33.456360.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_02T09_54_33.456360", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T09-54-33.456360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T09-54-33.456360.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_02T09_54_33.456360", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T09-54-33.456360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T09-54-33.456360.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_02T09_54_33.456360", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T09-54-33.456360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T09-54-33.456360.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_02T09_54_33.456360", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T09-54-33.456360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T09-54-33.456360.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_02T09_54_33.456360", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T09-54-33.456360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T09-54-33.456360.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_02T09_54_33.456360", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T09-54-33.456360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T09-54-33.456360.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_02T09_54_33.456360", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T09-54-33.456360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T09-54-33.456360.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_02T09_54_33.456360", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T09-54-33.456360.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T09-54-33.456360.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_02T09_54_33.456360", "path": ["**/details_harness|winogrande|5_2024-02-02T09-54-33.456360.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-02-02T09-54-33.456360.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_02_02T09_54_33.456360", "path": ["results_2024-02-02T09-54-33.456360.parquet"]}, {"split": "latest", "path": ["results_2024-02-02T09-54-33.456360.parquet"]}]}]}
2024-02-02T09:57:22+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of OpenBuddy/openbuddy-mixtral-7bx8-v17.2-32k Dataset automatically created during the evaluation run of model OpenBuddy/openbuddy-mixtral-7bx8-v17.2-32k on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-02-02T09:54:33.456360 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each of them in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
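The snippet that this card's "do the following:" refers to was dropped in this flattened rendering. A minimal sketch of the load it describes, assuming the repository id follows the leaderboard's `details_<org>__<model>` naming convention (the exact id is not stated in this record):

```python
# Hedged reconstruction of the stripped snippet; the repository id is an
# assumption based on the "details_<org>__<model>" convention used by other
# records in this dump.
from datasets import load_dataset

data = load_dataset(
    "open-llm-leaderboard/details_OpenBuddy__openbuddy-mixtral-7bx8-v17.2-32k",
    "harness_winogrande_5",  # one configuration per evaluated task
    split="train",           # "train" always tracks the latest results
)
```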
[ "# Dataset Card for Evaluation run of OpenBuddy/openbuddy-mixtral-7bx8-v17.2-32k\n\n\n\nDataset automatically created during the evaluation run of model OpenBuddy/openbuddy-mixtral-7bx8-v17.2-32k on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T09:54:33.456360(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of OpenBuddy/openbuddy-mixtral-7bx8-v17.2-32k\n\n\n\nDataset automatically created during the evaluation run of model OpenBuddy/openbuddy-mixtral-7bx8-v17.2-32k on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T09:54:33.456360(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
386f1f24495574e546cb60319b742bcbb3c1c271
# Dataset Card for Evaluation run of JaeyeonKang/CCK-v2.0-DPO <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [JaeyeonKang/CCK-v2.0-DPO](https://huggingface.co/JaeyeonKang/CCK-v2.0-DPO) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_JaeyeonKang__CCK-v2.0-DPO", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-02-02T09:56:16.082632](https://huggingface.co/datasets/open-llm-leaderboard/details_JaeyeonKang__CCK-v2.0-DPO/blob/main/results_2024-02-02T09-56-16.082632.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each of them in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6216656454546163, "acc_stderr": 0.03270192505428999, "acc_norm": 0.6258316061083755, "acc_norm_stderr": 0.03337309170640736, "mc1": 0.5520195838433293, "mc1_stderr": 0.017408513063422913, "mc2": 0.6933493958785608, "mc2_stderr": 0.014773200729827912 }, "harness|arc:challenge|25": { "acc": 0.6160409556313993, "acc_stderr": 0.01421244498065189, "acc_norm": 0.658703071672355, "acc_norm_stderr": 0.013855831287497728 }, "harness|hellaswag|10": { "acc": 0.6923919537940649, "acc_stderr": 0.004605601610012387, "acc_norm": 0.8680541724756025, "acc_norm_stderr": 0.0033774020414626175 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.27, "acc_stderr": 0.044619604333847394, "acc_norm": 0.27, "acc_norm_stderr": 0.044619604333847394 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.5259259259259259, "acc_stderr": 0.04313531696750575, "acc_norm": 0.5259259259259259, "acc_norm_stderr": 0.04313531696750575 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.7039473684210527, "acc_stderr": 0.037150621549989056, "acc_norm": 0.7039473684210527, "acc_norm_stderr": 0.037150621549989056 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.66, "acc_stderr": 0.04760952285695237, "acc_norm": 0.66, "acc_norm_stderr": 0.04760952285695237 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6981132075471698, "acc_stderr": 0.02825420034443866, "acc_norm": 0.6981132075471698, "acc_norm_stderr": 0.02825420034443866 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.6875, "acc_stderr": 0.038760854559127644, "acc_norm": 0.6875, "acc_norm_stderr": 0.038760854559127644 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.47, "acc_stderr": 0.05016135580465919, "acc_norm": 0.47, "acc_norm_stderr": 0.05016135580465919 },
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6589595375722543, "acc_stderr": 0.036146654241808254, "acc_norm": 0.6589595375722543, "acc_norm_stderr": 0.036146654241808254 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.49019607843137253, "acc_stderr": 0.04974229460422817, "acc_norm": 0.49019607843137253, "acc_norm_stderr": 0.04974229460422817 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.67, "acc_stderr": 0.04725815626252609, "acc_norm": 0.67, "acc_norm_stderr": 0.04725815626252609 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5446808510638298, "acc_stderr": 0.03255525359340354, "acc_norm": 0.5446808510638298, "acc_norm_stderr": 0.03255525359340354 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.4473684210526316, "acc_stderr": 0.04677473004491199, "acc_norm": 0.4473684210526316, "acc_norm_stderr": 0.04677473004491199 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5655172413793104, "acc_stderr": 0.04130740879555497, "acc_norm": 0.5655172413793104, "acc_norm_stderr": 0.04130740879555497 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.41005291005291006, "acc_stderr": 0.02533120243894443, "acc_norm": 0.41005291005291006, "acc_norm_stderr": 0.02533120243894443 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.5, "acc_stderr": 0.04472135954999579, "acc_norm": 0.5, "acc_norm_stderr": 0.04472135954999579 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.24, "acc_stderr": 0.04292346959909283, "acc_norm": 0.24, "acc_norm_stderr": 0.04292346959909283 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7709677419354839, "acc_stderr": 0.023904914311782655, "acc_norm": 0.7709677419354839, "acc_norm_stderr": 0.023904914311782655 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.4482758620689655, "acc_stderr": 0.03499113137676744, "acc_norm": 0.4482758620689655, "acc_norm_stderr": 0.03499113137676744 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.65, "acc_stderr": 0.0479372485441102, "acc_norm": 0.65, "acc_norm_stderr": 0.0479372485441102 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7696969696969697, "acc_stderr": 0.032876667586034906, "acc_norm": 0.7696969696969697, "acc_norm_stderr": 0.032876667586034906 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.803030303030303, "acc_stderr": 0.028335609732463355, "acc_norm": 0.803030303030303, "acc_norm_stderr": 0.028335609732463355 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8808290155440415, "acc_stderr": 0.02338193534812143, "acc_norm": 0.8808290155440415, "acc_norm_stderr": 0.02338193534812143 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6358974358974359, "acc_stderr": 0.024396672985094767, "acc_norm": 0.6358974358974359, "acc_norm_stderr": 0.024396672985094767 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3592592592592593, "acc_stderr": 0.029252905927251972, "acc_norm": 0.3592592592592593, "acc_norm_stderr": 0.029252905927251972 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6974789915966386, "acc_stderr": 0.029837962388291932, "acc_norm": 0.6974789915966386, "acc_norm_stderr": 0.029837962388291932 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3443708609271523, "acc_stderr": 0.038796870240733264, "acc_norm": 
0.3443708609271523, "acc_norm_stderr": 0.038796870240733264 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8330275229357799, "acc_stderr": 0.015990154885073406, "acc_norm": 0.8330275229357799, "acc_norm_stderr": 0.015990154885073406 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5694444444444444, "acc_stderr": 0.03376922151252335, "acc_norm": 0.5694444444444444, "acc_norm_stderr": 0.03376922151252335 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8137254901960784, "acc_stderr": 0.027325470966716312, "acc_norm": 0.8137254901960784, "acc_norm_stderr": 0.027325470966716312 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8016877637130801, "acc_stderr": 0.02595502084162113, "acc_norm": 0.8016877637130801, "acc_norm_stderr": 0.02595502084162113 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6636771300448431, "acc_stderr": 0.031708824268455, "acc_norm": 0.6636771300448431, "acc_norm_stderr": 0.031708824268455 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7480916030534351, "acc_stderr": 0.03807387116306085, "acc_norm": 0.7480916030534351, "acc_norm_stderr": 0.03807387116306085 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7603305785123967, "acc_stderr": 0.038968789850704164, "acc_norm": 0.7603305785123967, "acc_norm_stderr": 0.038968789850704164 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7407407407407407, "acc_stderr": 0.04236511258094634, "acc_norm": 0.7407407407407407, "acc_norm_stderr": 0.04236511258094634 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.6871165644171779, "acc_stderr": 0.036429145782924055, "acc_norm": 0.6871165644171779, "acc_norm_stderr": 0.036429145782924055 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.41964285714285715, "acc_stderr": 0.04684099321077106, "acc_norm": 0.41964285714285715, "acc_norm_stderr": 0.04684099321077106 }, "harness|hendrycksTest-management|5": { "acc": 0.7669902912621359, "acc_stderr": 0.04185832598928315, "acc_norm": 0.7669902912621359, "acc_norm_stderr": 0.04185832598928315 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8589743589743589, "acc_stderr": 0.022801382534597524, "acc_norm": 0.8589743589743589, "acc_norm_stderr": 0.022801382534597524 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.63, "acc_stderr": 0.04852365870939099, "acc_norm": 0.63, "acc_norm_stderr": 0.04852365870939099 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8033205619412516, "acc_stderr": 0.014214138556913915, "acc_norm": 0.8033205619412516, "acc_norm_stderr": 0.014214138556913915 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6965317919075145, "acc_stderr": 0.024752411960917205, "acc_norm": 0.6965317919075145, "acc_norm_stderr": 0.024752411960917205 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.42793296089385474, "acc_stderr": 0.016547887997416105, "acc_norm": 0.42793296089385474, "acc_norm_stderr": 0.016547887997416105 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7156862745098039, "acc_stderr": 0.025829163272757482, "acc_norm": 0.7156862745098039, "acc_norm_stderr": 0.025829163272757482 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6559485530546624, "acc_stderr": 0.02698147804364804, "acc_norm": 0.6559485530546624, "acc_norm_stderr": 0.02698147804364804 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7129629629629629, "acc_stderr": 0.025171041915309684, "acc_norm": 0.7129629629629629, "acc_norm_stderr": 0.025171041915309684 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 
0.46808510638297873, "acc_stderr": 0.02976667507587387, "acc_norm": 0.46808510638297873, "acc_norm_stderr": 0.02976667507587387 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4517601043024772, "acc_stderr": 0.012710662233660247, "acc_norm": 0.4517601043024772, "acc_norm_stderr": 0.012710662233660247 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6139705882352942, "acc_stderr": 0.029573269134411124, "acc_norm": 0.6139705882352942, "acc_norm_stderr": 0.029573269134411124 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6421568627450981, "acc_stderr": 0.01939305840235543, "acc_norm": 0.6421568627450981, "acc_norm_stderr": 0.01939305840235543 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6727272727272727, "acc_stderr": 0.0449429086625209, "acc_norm": 0.6727272727272727, "acc_norm_stderr": 0.0449429086625209 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7224489795918367, "acc_stderr": 0.028666857790274645, "acc_norm": 0.7224489795918367, "acc_norm_stderr": 0.028666857790274645 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8159203980099502, "acc_stderr": 0.027403859410786848, "acc_norm": 0.8159203980099502, "acc_norm_stderr": 0.027403859410786848 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.84, "acc_stderr": 0.03684529491774709, "acc_norm": 0.84, "acc_norm_stderr": 0.03684529491774709 }, "harness|hendrycksTest-virology|5": { "acc": 0.4879518072289157, "acc_stderr": 0.03891364495835821, "acc_norm": 0.4879518072289157, "acc_norm_stderr": 0.03891364495835821 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7543859649122807, "acc_stderr": 0.0330140594698725, "acc_norm": 0.7543859649122807, "acc_norm_stderr": 0.0330140594698725 }, "harness|truthfulqa:mc|0": { "mc1": 0.5520195838433293, "mc1_stderr": 0.017408513063422913, "mc2": 0.6933493958785608, "mc2_stderr": 0.014773200729827912 }, "harness|winogrande|5": { "acc": 0.8216258879242304, "acc_stderr": 0.010759352014855917 }, "harness|gsm8k|5": { "acc": 0.39423805913570886, "acc_stderr": 0.013460852357095652 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
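Beyond the per-task details that the snippet in this card loads, the aggregated metrics shown under "Latest results" are also stored as data. A minimal sketch of how to fetch them, assuming this run exposes the same `results` configuration and `latest` split layout as the previous record in this dump:

```python
# Sketch: pull the aggregated metrics for this run. The "results" config and
# its "latest" split are assumed to mirror the layout shown for the previous
# record in this dump.
from datasets import load_dataset

results = load_dataset(
    "open-llm-leaderboard/details_JaeyeonKang__CCK-v2.0-DPO",
    "results",
    split="latest",
)
print(results[0])  # one row per run, holding the aggregated scores
```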
open-llm-leaderboard/details_JaeyeonKang__CCK-v2.0-DPO
[ "region:us" ]
2024-02-02T09:58:33+00:00
{"pretty_name": "Evaluation run of JaeyeonKang/CCK-v2.0-DPO", "dataset_summary": "Dataset automatically created during the evaluation run of model [JaeyeonKang/CCK-v2.0-DPO](https://huggingface.co/JaeyeonKang/CCK-v2.0-DPO) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_JaeyeonKang__CCK-v2.0-DPO\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-02T09:56:16.082632](https://huggingface.co/datasets/open-llm-leaderboard/details_JaeyeonKang__CCK-v2.0-DPO/blob/main/results_2024-02-02T09-56-16.082632.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6216656454546163,\n \"acc_stderr\": 0.03270192505428999,\n \"acc_norm\": 0.6258316061083755,\n \"acc_norm_stderr\": 0.03337309170640736,\n \"mc1\": 0.5520195838433293,\n \"mc1_stderr\": 0.017408513063422913,\n \"mc2\": 0.6933493958785608,\n \"mc2_stderr\": 0.014773200729827912\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6160409556313993,\n \"acc_stderr\": 0.01421244498065189,\n \"acc_norm\": 0.658703071672355,\n \"acc_norm_stderr\": 0.013855831287497728\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6923919537940649,\n \"acc_stderr\": 0.004605601610012387,\n \"acc_norm\": 0.8680541724756025,\n \"acc_norm_stderr\": 0.0033774020414626175\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.27,\n \"acc_stderr\": 0.044619604333847394,\n \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.044619604333847394\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5259259259259259,\n \"acc_stderr\": 0.04313531696750575,\n \"acc_norm\": 0.5259259259259259,\n \"acc_norm_stderr\": 0.04313531696750575\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.7039473684210527,\n \"acc_stderr\": 0.037150621549989056,\n \"acc_norm\": 0.7039473684210527,\n \"acc_norm_stderr\": 0.037150621549989056\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.66,\n \"acc_stderr\": 0.04760952285695237,\n \"acc_norm\": 0.66,\n \"acc_norm_stderr\": 0.04760952285695237\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6981132075471698,\n \"acc_stderr\": 0.02825420034443866,\n \"acc_norm\": 0.6981132075471698,\n \"acc_norm_stderr\": 0.02825420034443866\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.6875,\n \"acc_stderr\": 0.038760854559127644,\n \"acc_norm\": 0.6875,\n \"acc_norm_stderr\": 0.038760854559127644\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.39,\n \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\": 
0.39,\n \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.47,\n \"acc_stderr\": 0.05016135580465919,\n \"acc_norm\": 0.47,\n \"acc_norm_stderr\": 0.05016135580465919\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.047258156262526045,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.047258156262526045\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6589595375722543,\n \"acc_stderr\": 0.036146654241808254,\n \"acc_norm\": 0.6589595375722543,\n \"acc_norm_stderr\": 0.036146654241808254\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.49019607843137253,\n \"acc_stderr\": 0.04974229460422817,\n \"acc_norm\": 0.49019607843137253,\n \"acc_norm_stderr\": 0.04974229460422817\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.67,\n \"acc_stderr\": 0.04725815626252609,\n \"acc_norm\": 0.67,\n \"acc_norm_stderr\": 0.04725815626252609\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5446808510638298,\n \"acc_stderr\": 0.03255525359340354,\n \"acc_norm\": 0.5446808510638298,\n \"acc_norm_stderr\": 0.03255525359340354\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.4473684210526316,\n \"acc_stderr\": 0.04677473004491199,\n \"acc_norm\": 0.4473684210526316,\n \"acc_norm_stderr\": 0.04677473004491199\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5655172413793104,\n \"acc_stderr\": 0.04130740879555497,\n \"acc_norm\": 0.5655172413793104,\n \"acc_norm_stderr\": 0.04130740879555497\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.41005291005291006,\n \"acc_stderr\": 0.02533120243894443,\n \"acc_norm\": 0.41005291005291006,\n \"acc_norm_stderr\": 0.02533120243894443\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.04472135954999579,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.04472135954999579\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.24,\n \"acc_stderr\": 0.04292346959909283,\n \"acc_norm\": 0.24,\n \"acc_norm_stderr\": 0.04292346959909283\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7709677419354839,\n \"acc_stderr\": 0.023904914311782655,\n \"acc_norm\": 0.7709677419354839,\n \"acc_norm_stderr\": 0.023904914311782655\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.4482758620689655,\n \"acc_stderr\": 0.03499113137676744,\n \"acc_norm\": 0.4482758620689655,\n \"acc_norm_stderr\": 0.03499113137676744\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.65,\n \"acc_stderr\": 0.0479372485441102,\n \"acc_norm\": 0.65,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7696969696969697,\n \"acc_stderr\": 0.032876667586034906,\n \"acc_norm\": 0.7696969696969697,\n \"acc_norm_stderr\": 0.032876667586034906\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.803030303030303,\n \"acc_stderr\": 0.028335609732463355,\n \"acc_norm\": 0.803030303030303,\n \"acc_norm_stderr\": 0.028335609732463355\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8808290155440415,\n \"acc_stderr\": 0.02338193534812143,\n \"acc_norm\": 0.8808290155440415,\n \"acc_norm_stderr\": 0.02338193534812143\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6358974358974359,\n \"acc_stderr\": 
0.024396672985094767,\n \"acc_norm\": 0.6358974358974359,\n \"acc_norm_stderr\": 0.024396672985094767\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3592592592592593,\n \"acc_stderr\": 0.029252905927251972,\n \"acc_norm\": 0.3592592592592593,\n \"acc_norm_stderr\": 0.029252905927251972\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6974789915966386,\n \"acc_stderr\": 0.029837962388291932,\n \"acc_norm\": 0.6974789915966386,\n \"acc_norm_stderr\": 0.029837962388291932\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3443708609271523,\n \"acc_stderr\": 0.038796870240733264,\n \"acc_norm\": 0.3443708609271523,\n \"acc_norm_stderr\": 0.038796870240733264\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8330275229357799,\n \"acc_stderr\": 0.015990154885073406,\n \"acc_norm\": 0.8330275229357799,\n \"acc_norm_stderr\": 0.015990154885073406\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5694444444444444,\n \"acc_stderr\": 0.03376922151252335,\n \"acc_norm\": 0.5694444444444444,\n \"acc_norm_stderr\": 0.03376922151252335\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8137254901960784,\n \"acc_stderr\": 0.027325470966716312,\n \"acc_norm\": 0.8137254901960784,\n \"acc_norm_stderr\": 0.027325470966716312\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8016877637130801,\n \"acc_stderr\": 0.02595502084162113,\n \"acc_norm\": 0.8016877637130801,\n \"acc_norm_stderr\": 0.02595502084162113\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6636771300448431,\n \"acc_stderr\": 0.031708824268455,\n \"acc_norm\": 0.6636771300448431,\n \"acc_norm_stderr\": 0.031708824268455\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7480916030534351,\n \"acc_stderr\": 0.03807387116306085,\n \"acc_norm\": 0.7480916030534351,\n \"acc_norm_stderr\": 0.03807387116306085\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7603305785123967,\n \"acc_stderr\": 0.038968789850704164,\n \"acc_norm\": 0.7603305785123967,\n \"acc_norm_stderr\": 0.038968789850704164\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7407407407407407,\n \"acc_stderr\": 0.04236511258094634,\n \"acc_norm\": 0.7407407407407407,\n \"acc_norm_stderr\": 0.04236511258094634\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.6871165644171779,\n \"acc_stderr\": 0.036429145782924055,\n \"acc_norm\": 0.6871165644171779,\n \"acc_norm_stderr\": 0.036429145782924055\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.41964285714285715,\n \"acc_stderr\": 0.04684099321077106,\n \"acc_norm\": 0.41964285714285715,\n \"acc_norm_stderr\": 0.04684099321077106\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7669902912621359,\n \"acc_stderr\": 0.04185832598928315,\n \"acc_norm\": 0.7669902912621359,\n \"acc_norm_stderr\": 0.04185832598928315\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8589743589743589,\n \"acc_stderr\": 0.022801382534597524,\n \"acc_norm\": 0.8589743589743589,\n \"acc_norm_stderr\": 0.022801382534597524\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.63,\n \"acc_stderr\": 0.04852365870939099,\n \"acc_norm\": 0.63,\n \"acc_norm_stderr\": 0.04852365870939099\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8033205619412516,\n \"acc_stderr\": 0.014214138556913915,\n \"acc_norm\": 0.8033205619412516,\n 
\"acc_norm_stderr\": 0.014214138556913915\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.6965317919075145,\n \"acc_stderr\": 0.024752411960917205,\n \"acc_norm\": 0.6965317919075145,\n \"acc_norm_stderr\": 0.024752411960917205\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.42793296089385474,\n \"acc_stderr\": 0.016547887997416105,\n \"acc_norm\": 0.42793296089385474,\n \"acc_norm_stderr\": 0.016547887997416105\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7156862745098039,\n \"acc_stderr\": 0.025829163272757482,\n \"acc_norm\": 0.7156862745098039,\n \"acc_norm_stderr\": 0.025829163272757482\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6559485530546624,\n \"acc_stderr\": 0.02698147804364804,\n \"acc_norm\": 0.6559485530546624,\n \"acc_norm_stderr\": 0.02698147804364804\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7129629629629629,\n \"acc_stderr\": 0.025171041915309684,\n \"acc_norm\": 0.7129629629629629,\n \"acc_norm_stderr\": 0.025171041915309684\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.46808510638297873,\n \"acc_stderr\": 0.02976667507587387,\n \"acc_norm\": 0.46808510638297873,\n \"acc_norm_stderr\": 0.02976667507587387\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4517601043024772,\n \"acc_stderr\": 0.012710662233660247,\n \"acc_norm\": 0.4517601043024772,\n \"acc_norm_stderr\": 0.012710662233660247\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6139705882352942,\n \"acc_stderr\": 0.029573269134411124,\n \"acc_norm\": 0.6139705882352942,\n \"acc_norm_stderr\": 0.029573269134411124\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6421568627450981,\n \"acc_stderr\": 0.01939305840235543,\n \"acc_norm\": 0.6421568627450981,\n \"acc_norm_stderr\": 0.01939305840235543\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6727272727272727,\n \"acc_stderr\": 0.0449429086625209,\n \"acc_norm\": 0.6727272727272727,\n \"acc_norm_stderr\": 0.0449429086625209\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7224489795918367,\n \"acc_stderr\": 0.028666857790274645,\n \"acc_norm\": 0.7224489795918367,\n \"acc_norm_stderr\": 0.028666857790274645\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8159203980099502,\n \"acc_stderr\": 0.027403859410786848,\n \"acc_norm\": 0.8159203980099502,\n \"acc_norm_stderr\": 0.027403859410786848\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.84,\n \"acc_stderr\": 0.03684529491774709,\n \"acc_norm\": 0.84,\n \"acc_norm_stderr\": 0.03684529491774709\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.4879518072289157,\n \"acc_stderr\": 0.03891364495835821,\n \"acc_norm\": 0.4879518072289157,\n \"acc_norm_stderr\": 0.03891364495835821\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.7543859649122807,\n \"acc_stderr\": 0.0330140594698725,\n \"acc_norm\": 0.7543859649122807,\n \"acc_norm_stderr\": 0.0330140594698725\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5520195838433293,\n \"mc1_stderr\": 0.017408513063422913,\n \"mc2\": 0.6933493958785608,\n \"mc2_stderr\": 0.014773200729827912\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8216258879242304,\n \"acc_stderr\": 0.010759352014855917\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.39423805913570886,\n \"acc_stderr\": 0.013460852357095652\n }\n}\n```", "repo_url": "https://huggingface.co/JaeyeonKang/CCK-v2.0-DPO", 
"leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_02T09_56_16.082632", "path": ["**/details_harness|arc:challenge|25_2024-02-02T09-56-16.082632.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-02T09-56-16.082632.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_02T09_56_16.082632", "path": ["**/details_harness|gsm8k|5_2024-02-02T09-56-16.082632.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-02T09-56-16.082632.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_02T09_56_16.082632", "path": ["**/details_harness|hellaswag|10_2024-02-02T09-56-16.082632.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-02T09-56-16.082632.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_02T09_56_16.082632", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T09-56-16.082632.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T09-56-16.082632.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T09-56-16.082632.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-02T09-56-16.082632.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T09-56-16.082632.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T09-56-16.082632.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_02T09_56_16.082632", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T09-56-16.082632.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T09-56-16.082632.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_02T09_56_16.082632", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T09-56-16.082632.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T09-56-16.082632.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_02T09_56_16.082632", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T09-56-16.082632.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T09-56-16.082632.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_02T09_56_16.082632", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T09-56-16.082632.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T09-56-16.082632.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_02T09_56_16.082632", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T09-56-16.082632.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T09-56-16.082632.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_02T09_56_16.082632", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T09-56-16.082632.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T09-56-16.082632.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_02T09_56_16.082632", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T09-56-16.082632.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T09-56-16.082632.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_02_02T09_56_16.082632", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T09-56-16.082632.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T09-56-16.082632.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_02T09_56_16.082632", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T09-56-16.082632.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T09-56-16.082632.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_02T09_56_16.082632", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T09-56-16.082632.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T09-56-16.082632.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_02T09_56_16.082632", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T09-56-16.082632.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T09-56-16.082632.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_02T09_56_16.082632", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T09-56-16.082632.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T09-56-16.082632.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_02T09_56_16.082632", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T09-56-16.082632.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T09-56-16.082632.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_02T09_56_16.082632", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T09-56-16.082632.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T09-56-16.082632.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_02T09_56_16.082632", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T09-56-16.082632.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T09-56-16.082632.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_02T09_56_16.082632", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T09-56-16.082632.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T09-56-16.082632.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_02T09_56_16.082632", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T09-56-16.082632.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T09-56-16.082632.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_02T09_56_16.082632", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T09-56-16.082632.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T09-56-16.082632.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_02T09_56_16.082632", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T09-56-16.082632.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T09-56-16.082632.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_02T09_56_16.082632", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T09-56-16.082632.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T09-56-16.082632.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_02T09_56_16.082632", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T09-56-16.082632.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T09-56-16.082632.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_02T09_56_16.082632", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T09-56-16.082632.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T09-56-16.082632.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_02T09_56_16.082632", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T09-56-16.082632.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T09-56-16.082632.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_02T09_56_16.082632", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T09-56-16.082632.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T09-56-16.082632.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_02T09_56_16.082632", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T09-56-16.082632.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T09-56-16.082632.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_02T09_56_16.082632", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T09-56-16.082632.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T09-56-16.082632.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_02T09_56_16.082632", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T09-56-16.082632.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T09-56-16.082632.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_02T09_56_16.082632", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T09-56-16.082632.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T09-56-16.082632.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_02T09_56_16.082632", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T09-56-16.082632.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T09-56-16.082632.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_02T09_56_16.082632", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T09-56-16.082632.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T09-56-16.082632.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_02T09_56_16.082632", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T09-56-16.082632.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T09-56-16.082632.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_02T09_56_16.082632", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T09-56-16.082632.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T09-56-16.082632.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_02T09_56_16.082632", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T09-56-16.082632.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T09-56-16.082632.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_02T09_56_16.082632", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T09-56-16.082632.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T09-56-16.082632.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_02T09_56_16.082632", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T09-56-16.082632.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T09-56-16.082632.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_02T09_56_16.082632", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T09-56-16.082632.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T09-56-16.082632.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_02T09_56_16.082632", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T09-56-16.082632.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T09-56-16.082632.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_02_02T09_56_16.082632", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T09-56-16.082632.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T09-56-16.082632.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_02T09_56_16.082632", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T09-56-16.082632.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T09-56-16.082632.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_02T09_56_16.082632", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T09-56-16.082632.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T09-56-16.082632.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_02T09_56_16.082632", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T09-56-16.082632.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T09-56-16.082632.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_02T09_56_16.082632", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T09-56-16.082632.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T09-56-16.082632.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_02T09_56_16.082632", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T09-56-16.082632.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T09-56-16.082632.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_02T09_56_16.082632", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T09-56-16.082632.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T09-56-16.082632.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_02T09_56_16.082632", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T09-56-16.082632.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T09-56-16.082632.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_02T09_56_16.082632", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T09-56-16.082632.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T09-56-16.082632.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_02T09_56_16.082632", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T09-56-16.082632.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T09-56-16.082632.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_02T09_56_16.082632", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T09-56-16.082632.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T09-56-16.082632.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_02T09_56_16.082632", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T09-56-16.082632.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T09-56-16.082632.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_02T09_56_16.082632", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T09-56-16.082632.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T09-56-16.082632.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_02T09_56_16.082632", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T09-56-16.082632.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T09-56-16.082632.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_02T09_56_16.082632", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T09-56-16.082632.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T09-56-16.082632.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_02T09_56_16.082632", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T09-56-16.082632.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T09-56-16.082632.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_02T09_56_16.082632", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T09-56-16.082632.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T09-56-16.082632.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_02T09_56_16.082632", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T09-56-16.082632.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T09-56-16.082632.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_02T09_56_16.082632", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T09-56-16.082632.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T09-56-16.082632.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_02T09_56_16.082632", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T09-56-16.082632.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T09-56-16.082632.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_02T09_56_16.082632", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T09-56-16.082632.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T09-56-16.082632.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_02T09_56_16.082632", "path": ["**/details_harness|winogrande|5_2024-02-02T09-56-16.082632.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-02-02T09-56-16.082632.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_02_02T09_56_16.082632", "path": ["results_2024-02-02T09-56-16.082632.parquet"]}, {"split": "latest", "path": 
["results_2024-02-02T09-56-16.082632.parquet"]}]}]}
2024-02-02T09:59:03+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of JaeyeonKang/CCK-v2.0-DPO Dataset automatically created during the evaluation run of model JaeyeonKang/CCK-v2.0-DPO on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-02-02T09:56:16.082632 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of JaeyeonKang/CCK-v2.0-DPO\n\n\n\nDataset automatically created during the evaluation run of model JaeyeonKang/CCK-v2.0-DPO on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T09:56:16.082632(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of JaeyeonKang/CCK-v2.0-DPO\n\n\n\nDataset automatically created during the evaluation run of model JaeyeonKang/CCK-v2.0-DPO on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T09:56:16.082632(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
2f59403de10cb0ecbf27ceecce792583abe8f356
<p style="text-align:center; font-size:2em; font-weight:bold;">SynthCI-30M</p> <div style="display: flex; justify-content: center; align-items: center; height: 100%;"> <img src="https://i.ibb.co/kDv612p/ef8b63cb-ce63-4246-8aab-6535711f61f5.webp" alt="Alt text" style="max-width:70%; height:auto;"> </div> This repo contains SynthCI-30M which is the dataset proposed in "SynthCLIP: Are We Ready For a Fully Synthetic CLIP Training?". The dataset contains 30M synthetic text-image pairs covering a wide range of concepts. <div style="text-align:center;"> <p><em>"We will reach a time where machines will create machines."</em></p> </div> ## Abstract We present SynthCLIP, a novel framework for training CLIP models with entirely synthetic text-image pairs, significantly departing from previous methods relying on real data. Leveraging recent text-to-image (TTI) generative networks and large language models (LLM), we are able to generate synthetic datasets of images and corresponding captions at any scale, with no human intervention. With training at scale, SynthCLIP achieves performance comparable to CLIP models trained on real datasets. We also introduce SynthCI-30M, a purely synthetic dataset comprising 30 million captioned images. ## Structure * `SynthCI-30/combined_images_and_captions.csv` contains the image paths with corresponding captions * `SynthCI-30/data` contains 3039 zip files each containing 10K images. ## Citation ``` @misc{hammoud2024synthclip, title={SynthCLIP: Are We Ready for a Fully Synthetic CLIP Training?}, author={Hasan Abed Al Kader Hammoud and Hani Itani and Fabio Pizzati and Philip Torr and Adel Bibi and Bernard Ghanem}, year={2024}, eprint={2402.01832}, archivePrefix={arXiv}, primaryClass={cs.CV} } ```
hammh0a/SynthCLIP
[ "license:cc-by-nc-4.0", "arxiv:2402.01832", "region:us" ]
2024-02-02T10:08:04+00:00
{"license": "cc-by-nc-4.0"}
2024-02-06T17:22:28+00:00
[ "2402.01832" ]
[]
TAGS #license-cc-by-nc-4.0 #arxiv-2402.01832 #region-us
<p style="text-align:center; font-size:2em; font-weight:bold;">SynthCI-30M</p> <div style="display: flex; justify-content: center; align-items: center; height: 100%;"> <img src="https://i.URL alt="Alt text" style="max-width:70%; height:auto;"> </div> This repo contains SynthCI-30M which is the dataset proposed in "SynthCLIP: Are We Ready For a Fully Synthetic CLIP Training?". The dataset contains 30M synthetic text-image pairs covering a wide range of concepts. <div style="text-align:center;"> <p><em>"We will reach a time where machines will create machines."</em></p> </div> ## Abstract We present SynthCLIP, a novel framework for training CLIP models with entirely synthetic text-image pairs, significantly departing from previous methods relying on real data. Leveraging recent text-to-image (TTI) generative networks and large language models (LLM), we are able to generate synthetic datasets of images and corresponding captions at any scale, with no human intervention. With training at scale, SynthCLIP achieves performance comparable to CLIP models trained on real datasets. We also introduce SynthCI-30M, a purely synthetic dataset comprising 30 million captioned images. ## Structure * 'SynthCI-30/combined_images_and_captions.csv' contains the image paths with corresponding captions * 'SynthCI-30/data' contains 3039 zip files each containing 10K images.
[ "## Abstract\nWe present SynthCLIP, a novel framework for training CLIP models with entirely synthetic text-image pairs, significantly departing from previous methods relying on real data. Leveraging recent text-to-image (TTI) generative networks and large language models (LLM), we are able to generate synthetic datasets of images and corresponding captions at any scale, with no human intervention. With training at scale, SynthCLIP achieves performance comparable to CLIP models trained on real datasets. We also introduce SynthCI-30M, a purely synthetic dataset comprising 30 million captioned images.", "## Structure\n* 'SynthCI-30/combined_images_and_captions.csv' contains the image paths with corresponding captions \n* 'SynthCI-30/data' contains 3039 zip files each containing 10K images." ]
[ "TAGS\n#license-cc-by-nc-4.0 #arxiv-2402.01832 #region-us \n", "## Abstract\nWe present SynthCLIP, a novel framework for training CLIP models with entirely synthetic text-image pairs, significantly departing from previous methods relying on real data. Leveraging recent text-to-image (TTI) generative networks and large language models (LLM), we are able to generate synthetic datasets of images and corresponding captions at any scale, with no human intervention. With training at scale, SynthCLIP achieves performance comparable to CLIP models trained on real datasets. We also introduce SynthCI-30M, a purely synthetic dataset comprising 30 million captioned images.", "## Structure\n* 'SynthCI-30/combined_images_and_captions.csv' contains the image paths with corresponding captions \n* 'SynthCI-30/data' contains 3039 zip files each containing 10K images." ]
339928f733ce97617bc6fe6b27e129fdd62156e6
# Dataset Card for Evaluation run of JaeyeonKang/CCK_Gony_v3.1 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [JaeyeonKang/CCK_Gony_v3.1](https://huggingface.co/JaeyeonKang/CCK_Gony_v3.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_JaeyeonKang__CCK_Gony_v3.1", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-02-02T10:33:39.706568](https://huggingface.co/datasets/open-llm-leaderboard/details_JaeyeonKang__CCK_Gony_v3.1/blob/main/results_2024-02-02T10-33-39.706568.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.7105000081818386, "acc_stderr": 0.03032838391908696, "acc_norm": 0.714514429689473, "acc_norm_stderr": 0.030912186715128652, "mc1": 0.49326805385556916, "mc1_stderr": 0.017501914492655382, "mc2": 0.6416631315674713, "mc2_stderr": 0.015126404162356517 }, "harness|arc:challenge|25": { "acc": 0.6638225255972696, "acc_stderr": 0.013804855026205765, "acc_norm": 0.6962457337883959, "acc_norm_stderr": 0.013438909184778768 }, "harness|hellaswag|10": { "acc": 0.6838279227245568, "acc_stderr": 0.004640306719628064, "acc_norm": 0.8745269866560446, "acc_norm_stderr": 0.003305774980082256 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.4, "acc_stderr": 0.04923659639173309, "acc_norm": 0.4, "acc_norm_stderr": 0.04923659639173309 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6814814814814815, "acc_stderr": 0.04024778401977108, "acc_norm": 0.6814814814814815, "acc_norm_stderr": 0.04024778401977108 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.7828947368421053, "acc_stderr": 0.033550453048829254, "acc_norm": 0.7828947368421053, "acc_norm_stderr": 0.033550453048829254 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.73, "acc_stderr": 0.0446196043338474, "acc_norm": 0.73, "acc_norm_stderr": 0.0446196043338474 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7811320754716982, "acc_stderr": 0.02544786382510861, "acc_norm": 0.7811320754716982, "acc_norm_stderr": 0.02544786382510861 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.8194444444444444, "acc_stderr": 0.032166008088022675, "acc_norm": 0.8194444444444444, "acc_norm_stderr": 0.032166008088022675 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.51, "acc_stderr": 0.05024183937956912, "acc_norm": 0.51, "acc_norm_stderr": 0.05024183937956912 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.63, "acc_stderr": 0.048523658709391, "acc_norm": 0.63, "acc_norm_stderr": 0.048523658709391 },
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.44, "acc_stderr": 0.04988876515698589, "acc_norm": 0.44, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.7630057803468208, "acc_stderr": 0.032424147574830975, "acc_norm": 0.7630057803468208, "acc_norm_stderr": 0.032424147574830975 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4411764705882353, "acc_stderr": 0.049406356306056595, "acc_norm": 0.4411764705882353, "acc_norm_stderr": 0.049406356306056595 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.8, "acc_stderr": 0.04020151261036845, "acc_norm": 0.8, "acc_norm_stderr": 0.04020151261036845 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.6595744680851063, "acc_stderr": 0.030976692998534443, "acc_norm": 0.6595744680851063, "acc_norm_stderr": 0.030976692998534443 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.6052631578947368, "acc_stderr": 0.04598188057816542, "acc_norm": 0.6052631578947368, "acc_norm_stderr": 0.04598188057816542 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.6551724137931034, "acc_stderr": 0.03960933549451207, "acc_norm": 0.6551724137931034, "acc_norm_stderr": 0.03960933549451207 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.48412698412698413, "acc_stderr": 0.025738330639412152, "acc_norm": 0.48412698412698413, "acc_norm_stderr": 0.025738330639412152 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.5238095238095238, "acc_stderr": 0.04467062628403273, "acc_norm": 0.5238095238095238, "acc_norm_stderr": 0.04467062628403273 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.43, "acc_stderr": 0.049756985195624284, "acc_norm": 0.43, "acc_norm_stderr": 0.049756985195624284 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.8483870967741935, "acc_stderr": 0.02040261665441676, "acc_norm": 0.8483870967741935, "acc_norm_stderr": 0.02040261665441676 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.6206896551724138, "acc_stderr": 0.034139638059062345, "acc_norm": 0.6206896551724138, "acc_norm_stderr": 0.034139638059062345 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.76, "acc_stderr": 0.04292346959909281, "acc_norm": 0.76, "acc_norm_stderr": 0.04292346959909281 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.793939393939394, "acc_stderr": 0.0315841532404771, "acc_norm": 0.793939393939394, "acc_norm_stderr": 0.0315841532404771 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8636363636363636, "acc_stderr": 0.024450155973189835, "acc_norm": 0.8636363636363636, "acc_norm_stderr": 0.024450155973189835 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9533678756476683, "acc_stderr": 0.015216761819262577, "acc_norm": 0.9533678756476683, "acc_norm_stderr": 0.015216761819262577 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.7, "acc_stderr": 0.023234581088428494, "acc_norm": 0.7, "acc_norm_stderr": 0.023234581088428494 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.37407407407407406, "acc_stderr": 0.02950286112895529, "acc_norm": 0.37407407407407406, "acc_norm_stderr": 0.02950286112895529 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.7941176470588235, "acc_stderr": 0.026265024608275882, "acc_norm": 0.7941176470588235, "acc_norm_stderr": 0.026265024608275882 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.4768211920529801, "acc_stderr": 0.04078093859163083, "acc_norm": 
0.4768211920529801, "acc_norm_stderr": 0.04078093859163083 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8807339449541285, "acc_stderr": 0.013895729292588956, "acc_norm": 0.8807339449541285, "acc_norm_stderr": 0.013895729292588956 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5972222222222222, "acc_stderr": 0.03344887382997866, "acc_norm": 0.5972222222222222, "acc_norm_stderr": 0.03344887382997866 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8529411764705882, "acc_stderr": 0.024857478080250447, "acc_norm": 0.8529411764705882, "acc_norm_stderr": 0.024857478080250447 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8481012658227848, "acc_stderr": 0.02336387809663245, "acc_norm": 0.8481012658227848, "acc_norm_stderr": 0.02336387809663245 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.7533632286995515, "acc_stderr": 0.028930413120910877, "acc_norm": 0.7533632286995515, "acc_norm_stderr": 0.028930413120910877 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.8091603053435115, "acc_stderr": 0.034465133507525975, "acc_norm": 0.8091603053435115, "acc_norm_stderr": 0.034465133507525975 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8760330578512396, "acc_stderr": 0.030083098716035202, "acc_norm": 0.8760330578512396, "acc_norm_stderr": 0.030083098716035202 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8240740740740741, "acc_stderr": 0.036809181416738807, "acc_norm": 0.8240740740740741, "acc_norm_stderr": 0.036809181416738807 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7975460122699386, "acc_stderr": 0.031570650789119, "acc_norm": 0.7975460122699386, "acc_norm_stderr": 0.031570650789119 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.5803571428571429, "acc_stderr": 0.04684099321077106, "acc_norm": 0.5803571428571429, "acc_norm_stderr": 0.04684099321077106 }, "harness|hendrycksTest-management|5": { "acc": 0.8446601941747572, "acc_stderr": 0.03586594738573974, "acc_norm": 0.8446601941747572, "acc_norm_stderr": 0.03586594738573974 }, "harness|hendrycksTest-marketing|5": { "acc": 0.9273504273504274, "acc_stderr": 0.017004368568132366, "acc_norm": 0.9273504273504274, "acc_norm_stderr": 0.017004368568132366 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.78, "acc_stderr": 0.041633319989322626, "acc_norm": 0.78, "acc_norm_stderr": 0.041633319989322626 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8773946360153256, "acc_stderr": 0.011728672144131563, "acc_norm": 0.8773946360153256, "acc_norm_stderr": 0.011728672144131563 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7832369942196532, "acc_stderr": 0.022183477668412856, "acc_norm": 0.7832369942196532, "acc_norm_stderr": 0.022183477668412856 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4592178770949721, "acc_stderr": 0.016666783616525776, "acc_norm": 0.4592178770949721, "acc_norm_stderr": 0.016666783616525776 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.826797385620915, "acc_stderr": 0.021668400256514266, "acc_norm": 0.826797385620915, "acc_norm_stderr": 0.021668400256514266 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7942122186495176, "acc_stderr": 0.022961339906764244, "acc_norm": 0.7942122186495176, "acc_norm_stderr": 0.022961339906764244 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.8271604938271605, "acc_stderr": 0.021038517770157358, "acc_norm": 0.8271604938271605, "acc_norm_stderr": 0.021038517770157358 }, "harness|hendrycksTest-professional_accounting|5": { 
"acc": 0.549645390070922, "acc_stderr": 0.02968010556502904, "acc_norm": 0.549645390070922, "acc_norm_stderr": 0.02968010556502904 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.5417209908735332, "acc_stderr": 0.012725701656953642, "acc_norm": 0.5417209908735332, "acc_norm_stderr": 0.012725701656953642 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.7977941176470589, "acc_stderr": 0.024398192986654924, "acc_norm": 0.7977941176470589, "acc_norm_stderr": 0.024398192986654924 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.7663398692810458, "acc_stderr": 0.017119158496044506, "acc_norm": 0.7663398692810458, "acc_norm_stderr": 0.017119158496044506 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.7090909090909091, "acc_stderr": 0.04350271442923243, "acc_norm": 0.7090909090909091, "acc_norm_stderr": 0.04350271442923243 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7836734693877551, "acc_stderr": 0.026358916334904028, "acc_norm": 0.7836734693877551, "acc_norm_stderr": 0.026358916334904028 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8905472636815921, "acc_stderr": 0.02207632610182466, "acc_norm": 0.8905472636815921, "acc_norm_stderr": 0.02207632610182466 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.89, "acc_stderr": 0.03144660377352203, "acc_norm": 0.89, "acc_norm_stderr": 0.03144660377352203 }, "harness|hendrycksTest-virology|5": { "acc": 0.5120481927710844, "acc_stderr": 0.03891364495835816, "acc_norm": 0.5120481927710844, "acc_norm_stderr": 0.03891364495835816 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8830409356725146, "acc_stderr": 0.02464806896136615, "acc_norm": 0.8830409356725146, "acc_norm_stderr": 0.02464806896136615 }, "harness|truthfulqa:mc|0": { "mc1": 0.49326805385556916, "mc1_stderr": 0.017501914492655382, "mc2": 0.6416631315674713, "mc2_stderr": 0.015126404162356517 }, "harness|winogrande|5": { "acc": 0.8113654301499605, "acc_stderr": 0.010995172318019808 }, "harness|gsm8k|5": { "acc": 0.5959059893858984, "acc_stderr": 0.013516752972721723 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
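As a usage note on the card above: besides the per-task configurations, the aggregated numbers shown under "Latest results" live in the "results" configuration, whose "latest" split points at the most recent run. A short sketch, assuming the config layout the card and metadata describe:

```python
from datasets import load_dataset

# The "results" config aggregates all metrics of a run; "latest" tracks the
# most recent evaluation (here 2024-02-02T10:33:39.706568).
results = load_dataset(
    "open-llm-leaderboard/details_JaeyeonKang__CCK_Gony_v3.1",
    "results",
    split="latest",
)
print(results)
```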
open-llm-leaderboard/details_JaeyeonKang__CCK_Gony_v3.1
[ "region:us" ]
2024-02-02T10:35:57+00:00
{"pretty_name": "Evaluation run of JaeyeonKang/CCK_Gony_v3.1", "dataset_summary": "Dataset automatically created during the evaluation run of model [JaeyeonKang/CCK_Gony_v3.1](https://huggingface.co/JaeyeonKang/CCK_Gony_v3.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_JaeyeonKang__CCK_Gony_v3.1\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-02T10:33:39.706568](https://huggingface.co/datasets/open-llm-leaderboard/details_JaeyeonKang__CCK_Gony_v3.1/blob/main/results_2024-02-02T10-33-39.706568.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.7105000081818386,\n \"acc_stderr\": 0.03032838391908696,\n \"acc_norm\": 0.714514429689473,\n \"acc_norm_stderr\": 0.030912186715128652,\n \"mc1\": 0.49326805385556916,\n \"mc1_stderr\": 0.017501914492655382,\n \"mc2\": 0.6416631315674713,\n \"mc2_stderr\": 0.015126404162356517\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6638225255972696,\n \"acc_stderr\": 0.013804855026205765,\n \"acc_norm\": 0.6962457337883959,\n \"acc_norm_stderr\": 0.013438909184778768\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6838279227245568,\n \"acc_stderr\": 0.004640306719628064,\n \"acc_norm\": 0.8745269866560446,\n \"acc_norm_stderr\": 0.003305774980082256\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.4,\n \"acc_stderr\": 0.04923659639173309,\n \"acc_norm\": 0.4,\n \"acc_norm_stderr\": 0.04923659639173309\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6814814814814815,\n \"acc_stderr\": 0.04024778401977108,\n \"acc_norm\": 0.6814814814814815,\n \"acc_norm_stderr\": 0.04024778401977108\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.7828947368421053,\n \"acc_stderr\": 0.033550453048829254,\n \"acc_norm\": 0.7828947368421053,\n \"acc_norm_stderr\": 0.033550453048829254\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.73,\n \"acc_stderr\": 0.0446196043338474,\n \"acc_norm\": 0.73,\n \"acc_norm_stderr\": 0.0446196043338474\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7811320754716982,\n \"acc_stderr\": 0.02544786382510861,\n \"acc_norm\": 0.7811320754716982,\n \"acc_norm_stderr\": 0.02544786382510861\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.8194444444444444,\n \"acc_stderr\": 0.032166008088022675,\n \"acc_norm\": 0.8194444444444444,\n \"acc_norm_stderr\": 0.032166008088022675\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.51,\n \"acc_stderr\": 
0.05024183937956912,\n \"acc_norm\": 0.51,\n \"acc_norm_stderr\": 0.05024183937956912\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.63,\n \"acc_stderr\": 0.048523658709391,\n \"acc_norm\": 0.63,\n \"acc_norm_stderr\": 0.048523658709391\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.44,\n \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.44,\n \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.7630057803468208,\n \"acc_stderr\": 0.032424147574830975,\n \"acc_norm\": 0.7630057803468208,\n \"acc_norm_stderr\": 0.032424147574830975\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4411764705882353,\n \"acc_stderr\": 0.049406356306056595,\n \"acc_norm\": 0.4411764705882353,\n \"acc_norm_stderr\": 0.049406356306056595\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.8,\n \"acc_stderr\": 0.04020151261036845,\n \"acc_norm\": 0.8,\n \"acc_norm_stderr\": 0.04020151261036845\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.6595744680851063,\n \"acc_stderr\": 0.030976692998534443,\n \"acc_norm\": 0.6595744680851063,\n \"acc_norm_stderr\": 0.030976692998534443\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.6052631578947368,\n \"acc_stderr\": 0.04598188057816542,\n \"acc_norm\": 0.6052631578947368,\n \"acc_norm_stderr\": 0.04598188057816542\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.6551724137931034,\n \"acc_stderr\": 0.03960933549451207,\n \"acc_norm\": 0.6551724137931034,\n \"acc_norm_stderr\": 0.03960933549451207\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.48412698412698413,\n \"acc_stderr\": 0.025738330639412152,\n \"acc_norm\": 0.48412698412698413,\n \"acc_norm_stderr\": 0.025738330639412152\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.5238095238095238,\n \"acc_stderr\": 0.04467062628403273,\n \"acc_norm\": 0.5238095238095238,\n \"acc_norm_stderr\": 0.04467062628403273\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.43,\n \"acc_stderr\": 0.049756985195624284,\n \"acc_norm\": 0.43,\n \"acc_norm_stderr\": 0.049756985195624284\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.8483870967741935,\n \"acc_stderr\": 0.02040261665441676,\n \"acc_norm\": 0.8483870967741935,\n \"acc_norm_stderr\": 0.02040261665441676\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.6206896551724138,\n \"acc_stderr\": 0.034139638059062345,\n \"acc_norm\": 0.6206896551724138,\n \"acc_norm_stderr\": 0.034139638059062345\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.76,\n \"acc_stderr\": 0.04292346959909281,\n \"acc_norm\": 0.76,\n \"acc_norm_stderr\": 0.04292346959909281\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.793939393939394,\n \"acc_stderr\": 0.0315841532404771,\n \"acc_norm\": 0.793939393939394,\n \"acc_norm_stderr\": 0.0315841532404771\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8636363636363636,\n \"acc_stderr\": 0.024450155973189835,\n \"acc_norm\": 0.8636363636363636,\n \"acc_norm_stderr\": 0.024450155973189835\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9533678756476683,\n \"acc_stderr\": 0.015216761819262577,\n \"acc_norm\": 0.9533678756476683,\n \"acc_norm_stderr\": 0.015216761819262577\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": 
{\n \"acc\": 0.7,\n \"acc_stderr\": 0.023234581088428494,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.023234581088428494\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.37407407407407406,\n \"acc_stderr\": 0.02950286112895529,\n \"acc_norm\": 0.37407407407407406,\n \"acc_norm_stderr\": 0.02950286112895529\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.7941176470588235,\n \"acc_stderr\": 0.026265024608275882,\n \"acc_norm\": 0.7941176470588235,\n \"acc_norm_stderr\": 0.026265024608275882\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.4768211920529801,\n \"acc_stderr\": 0.04078093859163083,\n \"acc_norm\": 0.4768211920529801,\n \"acc_norm_stderr\": 0.04078093859163083\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8807339449541285,\n \"acc_stderr\": 0.013895729292588956,\n \"acc_norm\": 0.8807339449541285,\n \"acc_norm_stderr\": 0.013895729292588956\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5972222222222222,\n \"acc_stderr\": 0.03344887382997866,\n \"acc_norm\": 0.5972222222222222,\n \"acc_norm_stderr\": 0.03344887382997866\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8529411764705882,\n \"acc_stderr\": 0.024857478080250447,\n \"acc_norm\": 0.8529411764705882,\n \"acc_norm_stderr\": 0.024857478080250447\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8481012658227848,\n \"acc_stderr\": 0.02336387809663245,\n \"acc_norm\": 0.8481012658227848,\n \"acc_norm_stderr\": 0.02336387809663245\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.7533632286995515,\n \"acc_stderr\": 0.028930413120910877,\n \"acc_norm\": 0.7533632286995515,\n \"acc_norm_stderr\": 0.028930413120910877\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.8091603053435115,\n \"acc_stderr\": 0.034465133507525975,\n \"acc_norm\": 0.8091603053435115,\n \"acc_norm_stderr\": 0.034465133507525975\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8760330578512396,\n \"acc_stderr\": 0.030083098716035202,\n \"acc_norm\": 0.8760330578512396,\n \"acc_norm_stderr\": 0.030083098716035202\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8240740740740741,\n \"acc_stderr\": 0.036809181416738807,\n \"acc_norm\": 0.8240740740740741,\n \"acc_norm_stderr\": 0.036809181416738807\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7975460122699386,\n \"acc_stderr\": 0.031570650789119,\n \"acc_norm\": 0.7975460122699386,\n \"acc_norm_stderr\": 0.031570650789119\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.5803571428571429,\n \"acc_stderr\": 0.04684099321077106,\n \"acc_norm\": 0.5803571428571429,\n \"acc_norm_stderr\": 0.04684099321077106\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8446601941747572,\n \"acc_stderr\": 0.03586594738573974,\n \"acc_norm\": 0.8446601941747572,\n \"acc_norm_stderr\": 0.03586594738573974\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.9273504273504274,\n \"acc_stderr\": 0.017004368568132366,\n \"acc_norm\": 0.9273504273504274,\n \"acc_norm_stderr\": 0.017004368568132366\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.78,\n \"acc_stderr\": 0.041633319989322626,\n \"acc_norm\": 0.78,\n \"acc_norm_stderr\": 0.041633319989322626\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8773946360153256,\n \"acc_stderr\": 0.011728672144131563,\n \"acc_norm\": 
0.8773946360153256,\n \"acc_norm_stderr\": 0.011728672144131563\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7832369942196532,\n \"acc_stderr\": 0.022183477668412856,\n \"acc_norm\": 0.7832369942196532,\n \"acc_norm_stderr\": 0.022183477668412856\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4592178770949721,\n \"acc_stderr\": 0.016666783616525776,\n \"acc_norm\": 0.4592178770949721,\n \"acc_norm_stderr\": 0.016666783616525776\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.826797385620915,\n \"acc_stderr\": 0.021668400256514266,\n \"acc_norm\": 0.826797385620915,\n \"acc_norm_stderr\": 0.021668400256514266\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7942122186495176,\n \"acc_stderr\": 0.022961339906764244,\n \"acc_norm\": 0.7942122186495176,\n \"acc_norm_stderr\": 0.022961339906764244\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.8271604938271605,\n \"acc_stderr\": 0.021038517770157358,\n \"acc_norm\": 0.8271604938271605,\n \"acc_norm_stderr\": 0.021038517770157358\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.549645390070922,\n \"acc_stderr\": 0.02968010556502904,\n \"acc_norm\": 0.549645390070922,\n \"acc_norm_stderr\": 0.02968010556502904\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.5417209908735332,\n \"acc_stderr\": 0.012725701656953642,\n \"acc_norm\": 0.5417209908735332,\n \"acc_norm_stderr\": 0.012725701656953642\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.7977941176470589,\n \"acc_stderr\": 0.024398192986654924,\n \"acc_norm\": 0.7977941176470589,\n \"acc_norm_stderr\": 0.024398192986654924\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.7663398692810458,\n \"acc_stderr\": 0.017119158496044506,\n \"acc_norm\": 0.7663398692810458,\n \"acc_norm_stderr\": 0.017119158496044506\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.7090909090909091,\n \"acc_stderr\": 0.04350271442923243,\n \"acc_norm\": 0.7090909090909091,\n \"acc_norm_stderr\": 0.04350271442923243\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7836734693877551,\n \"acc_stderr\": 0.026358916334904028,\n \"acc_norm\": 0.7836734693877551,\n \"acc_norm_stderr\": 0.026358916334904028\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8905472636815921,\n \"acc_stderr\": 0.02207632610182466,\n \"acc_norm\": 0.8905472636815921,\n \"acc_norm_stderr\": 0.02207632610182466\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.89,\n \"acc_stderr\": 0.03144660377352203,\n \"acc_norm\": 0.89,\n \"acc_norm_stderr\": 0.03144660377352203\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5120481927710844,\n \"acc_stderr\": 0.03891364495835816,\n \"acc_norm\": 0.5120481927710844,\n \"acc_norm_stderr\": 0.03891364495835816\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8830409356725146,\n \"acc_stderr\": 0.02464806896136615,\n \"acc_norm\": 0.8830409356725146,\n \"acc_norm_stderr\": 0.02464806896136615\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.49326805385556916,\n \"mc1_stderr\": 0.017501914492655382,\n \"mc2\": 0.6416631315674713,\n \"mc2_stderr\": 0.015126404162356517\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8113654301499605,\n \"acc_stderr\": 0.010995172318019808\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.5959059893858984,\n \"acc_stderr\": 0.013516752972721723\n }\n}\n```", "repo_url": 
"https://huggingface.co/JaeyeonKang/CCK_Gony_v3.1", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_02T10_33_39.706568", "path": ["**/details_harness|arc:challenge|25_2024-02-02T10-33-39.706568.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-02T10-33-39.706568.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_02T10_33_39.706568", "path": ["**/details_harness|gsm8k|5_2024-02-02T10-33-39.706568.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-02T10-33-39.706568.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_02T10_33_39.706568", "path": ["**/details_harness|hellaswag|10_2024-02-02T10-33-39.706568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-02T10-33-39.706568.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_02T10_33_39.706568", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T10-33-39.706568.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T10-33-39.706568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T10-33-39.706568.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T10-33-39.706568.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T10-33-39.706568.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T10-33-39.706568.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_02T10_33_39.706568", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T10-33-39.706568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T10-33-39.706568.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_02T10_33_39.706568", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T10-33-39.706568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T10-33-39.706568.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_02T10_33_39.706568", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T10-33-39.706568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T10-33-39.706568.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_02T10_33_39.706568", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T10-33-39.706568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T10-33-39.706568.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_02T10_33_39.706568", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T10-33-39.706568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T10-33-39.706568.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_02T10_33_39.706568", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T10-33-39.706568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T10-33-39.706568.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_02T10_33_39.706568", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T10-33-39.706568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T10-33-39.706568.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_02_02T10_33_39.706568", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T10-33-39.706568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T10-33-39.706568.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_02T10_33_39.706568", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T10-33-39.706568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T10-33-39.706568.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_02T10_33_39.706568", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T10-33-39.706568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T10-33-39.706568.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_02T10_33_39.706568", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T10-33-39.706568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T10-33-39.706568.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_02T10_33_39.706568", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T10-33-39.706568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T10-33-39.706568.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_02T10_33_39.706568", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T10-33-39.706568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T10-33-39.706568.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_02T10_33_39.706568", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T10-33-39.706568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T10-33-39.706568.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_02T10_33_39.706568", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T10-33-39.706568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T10-33-39.706568.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_02T10_33_39.706568", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T10-33-39.706568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T10-33-39.706568.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_02T10_33_39.706568", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T10-33-39.706568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T10-33-39.706568.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_02T10_33_39.706568", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T10-33-39.706568.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T10-33-39.706568.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_02T10_33_39.706568", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T10-33-39.706568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T10-33-39.706568.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_02T10_33_39.706568", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T10-33-39.706568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T10-33-39.706568.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_02T10_33_39.706568", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T10-33-39.706568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T10-33-39.706568.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_02T10_33_39.706568", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T10-33-39.706568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T10-33-39.706568.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_02T10_33_39.706568", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T10-33-39.706568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T10-33-39.706568.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_02T10_33_39.706568", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T10-33-39.706568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T10-33-39.706568.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_02T10_33_39.706568", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T10-33-39.706568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T10-33-39.706568.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_02T10_33_39.706568", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T10-33-39.706568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T10-33-39.706568.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_02T10_33_39.706568", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T10-33-39.706568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T10-33-39.706568.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_02T10_33_39.706568", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T10-33-39.706568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T10-33-39.706568.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_02T10_33_39.706568", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T10-33-39.706568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T10-33-39.706568.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_02T10_33_39.706568", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T10-33-39.706568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T10-33-39.706568.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_02T10_33_39.706568", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T10-33-39.706568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T10-33-39.706568.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_02T10_33_39.706568", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T10-33-39.706568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T10-33-39.706568.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_02T10_33_39.706568", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T10-33-39.706568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T10-33-39.706568.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_02T10_33_39.706568", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T10-33-39.706568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T10-33-39.706568.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_02T10_33_39.706568", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T10-33-39.706568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T10-33-39.706568.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_02T10_33_39.706568", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T10-33-39.706568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T10-33-39.706568.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_02T10_33_39.706568", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T10-33-39.706568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T10-33-39.706568.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_02_02T10_33_39.706568", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T10-33-39.706568.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T10-33-39.706568.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_02T10_33_39.706568", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T10-33-39.706568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T10-33-39.706568.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_02T10_33_39.706568", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T10-33-39.706568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T10-33-39.706568.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_02T10_33_39.706568", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T10-33-39.706568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T10-33-39.706568.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_02T10_33_39.706568", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T10-33-39.706568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T10-33-39.706568.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_02T10_33_39.706568", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T10-33-39.706568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T10-33-39.706568.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_02T10_33_39.706568", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T10-33-39.706568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T10-33-39.706568.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_02T10_33_39.706568", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T10-33-39.706568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T10-33-39.706568.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_02T10_33_39.706568", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T10-33-39.706568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T10-33-39.706568.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_02T10_33_39.706568", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T10-33-39.706568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T10-33-39.706568.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_02T10_33_39.706568", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T10-33-39.706568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T10-33-39.706568.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_02T10_33_39.706568", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T10-33-39.706568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T10-33-39.706568.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_02T10_33_39.706568", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T10-33-39.706568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T10-33-39.706568.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_02T10_33_39.706568", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T10-33-39.706568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T10-33-39.706568.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_02T10_33_39.706568", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T10-33-39.706568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T10-33-39.706568.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_02T10_33_39.706568", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T10-33-39.706568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T10-33-39.706568.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_02T10_33_39.706568", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T10-33-39.706568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T10-33-39.706568.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_02T10_33_39.706568", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T10-33-39.706568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T10-33-39.706568.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_02T10_33_39.706568", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T10-33-39.706568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T10-33-39.706568.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_02T10_33_39.706568", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T10-33-39.706568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T10-33-39.706568.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_02T10_33_39.706568", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T10-33-39.706568.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T10-33-39.706568.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_02T10_33_39.706568", "path": ["**/details_harness|winogrande|5_2024-02-02T10-33-39.706568.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-02-02T10-33-39.706568.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_02_02T10_33_39.706568", "path": ["results_2024-02-02T10-33-39.706568.parquet"]}, {"split": "latest", "path": ["results_2024-02-02T10-33-39.706568.parquet"]}]}]}
2024-02-02T10:36:23+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of JaeyeonKang/CCK_Gony_v3.1 Dataset automatically created during the evaluation run of model JaeyeonKang/CCK_Gony_v3.1 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following (see the minimal sketch after this card text): ## Latest results These are the latest results from run 2024-02-02T10:33:39.706568 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of JaeyeonKang/CCK_Gony_v3.1\n\n\n\nDataset automatically created during the evaluation run of model JaeyeonKang/CCK_Gony_v3.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T10:33:39.706568(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of JaeyeonKang/CCK_Gony_v3.1\n\n\n\nDataset automatically created during the evaluation run of model JaeyeonKang/CCK_Gony_v3.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T10:33:39.706568(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
0ab2ffd07e96caadf95e52a7a4ad6b4495639337
# 📚 BookTection Dataset
The BookTection dataset serves as a benchmark designed for the task of detecting pretraining data from Large Language Models. The dataset consists of 165 books.
- 60 published in 2023: Non-Training data, "_label_" column = 0.
- 105 published before 2022: Training data, "_label_" column = 1.

From each book ≈ 34 passages are extracted. Each passage is paraphrased 3 times using the Language Model Claude v2.0. <br>
The "_Answer_" column indicates which of the passages is the real excerpt.<br>
Passages come in 3 different sizes (small, medium and large), which aim to be ≈64, 128 and 256 tokens in length, respectively. <br>

# 🧪 Testing Models on BookTection
Our dataset is meant to be used in a Multiple-Choice Question-Answering format. Nonetheless, it is also compatible with other pretraining-data detection methods.<br>
Our [GitHub](https://github.com/avduarte333/DE-COP_Method) repository contains example scripts to evaluate models on our dataset; a minimal prompt-construction sketch also follows the reference list below. <br>

# 🤝 Compatibility
The Multiple-Choice Question-Answering task with our dataset is designed to be applied to various models, such as:<br>
- LLaMA-2
- Mistral
- Mixtral
- Chat-GPT (gpt-3.5-turbo-instruct)
- GPT-3 (text-davinci-003)
- Claude <br>

# 🔧 Loading the Dataset
```python
from datasets import load_dataset

dataset = load_dataset("avduarte333/BookTection")
```
<br>

# 💬 Citation
```bibtex
@misc{duarte2024decop,
      title={{DE-COP: Detecting Copyrighted Content in Language Models Training Data}},
      author={André V. Duarte and Xuandong Zhao and Arlindo L. Oliveira and Lei Li},
      year={2024},
      eprint={2402.09910},
      archivePrefix={arXiv},
      primaryClass={cs.CL}
}
```

<details>
<summary> 📖 Book References</summary>

[1] Orwell, G. (2021). Nineteen Eighty-Four. Penguin Classics.<br> [2] Martin, George R. R., author. (1996). A game of thrones. New York :Bantam Books,<br> [3] Joyce, J. (1992). A portrait of the artist as a young man. Wordsworth Editions.<br> [4] Dickens, C. (2012). A tale of two cities. Penguin Classics.<br> [5] Twain, M. (2010). The adventures of huckleberry Finn. William Collins.<br> [6] Carroll, L. (2015). Alice’s adventures in wonderland (M. Burstein, Ed.). Princeton University Press.<br> [7] Doerr, A. (2015). All the light we cannot see. Fourth Estate.<br> [8] Christie, A. (2003). And then there were none. HarperCollins.<br> [9] Brown, D. (2000). Angels & demons. 1st large print ed. New York, Random House Large Print.<br> [10] Montgomery, L. M. (2018). Anne of Green Gables. Wordsworth Editions.<br> [11] Melville, H. (2004). Bartleby The Scrivener. Melville House Publishing.<br> [12] Sewell, A. (2018). Black Beauty. Wordsworth Editions.<br> [13] Meyer, S. (2009). Breaking dawn. 1st special ed. London, Little, Brown Children.<br> [14] Ian, F. (2002). Casino Royale. Penguin Books.<br> [15] Heller, Joseph, 1923-1999. (1961). Catch-22, a novel. New York :The Modern library,<br> [16] Dahl, R. (2016). Charlie and the chocolate factory. Puffin.<br> [17] Dickens, C., & de Gavin, A. (1992). David Copperfield. Wordsworth Editions.<br> [18] Stoker, B. (1993). Dracula. Wordsworth Editions.<br> [19] Herbert, F. (2006). Dune. Hodder Paperback.<br> [20] Meyer, S. (2008). Eclipse. ATOM.<br> [21] Austen, J. (1992). Emma. Wordsworth Editions.<br> [22] Follett, K. (1978). Eye of the needle: a novel. New York, Arbor House.<br> [23] Bradbury, R. (1992). Fahrenheit 451. Del Rey Books.<br> [24] Jong, E. (1973). Fear of flying. [Book club ed.]. New York, Holt, Rinehart and Winston.<br> [25] James, E. L. (2012). Fifty shades of grey.
Random House.<br> [26] James, E. L. (2017). Fifty shades darker. Vintage books movie tie-in edition. New York, Vintage Books.<br> [27] Blyton, Enid. (1942). The Famous Five: Five on a treasure island . : .<br> [28] Shelley, M. (2012). Frankenstein. Penguin Classics.<br> [29] Flynn, G. (2014). Gone Girl. Weidenfeld & Nicolson.<br> [30] Dickens, C. (1992). Great Expectations. Wordsworth Editions.<br> [31] Rowling, J. K. (2014). Harry potter and the chamber of secrets. Bloomsbury Childrens Books.<br> [32] Rowling, J. K. (2014). Harry potter and the deathly hallows. Bloomsbury Childrens Books.<br> [33] Rowling, J. K. (2014). Harry potter and the goblet of fire. Bloomsbury Childrens Books.<br> [34] Rowling, J. K. (2014). Harry potter and the half-blood prince. Bloomsbury Childrens Books.<br> [35] Rowling, J. K. (2014). Harry potter and the order of the Phoenix. Bloomsbury Childrens Books.<br> [36] Rowling, J. K. (2014). Harry potter and the philosopher’s stone. Bloomsbury Childrens Books.<br> [37] Rowling, J. K. (2014). Harry potter and the prisoner of azkaban. Bloomsbury Childrens Books.<br> [38] Adams, D. (2007). The hitchhiker’s guide to the galaxy. Random House.<br> [39] Doyle, S. A. C. (2012). The hound of the baskervilles. Penguin Classics.<br> [40] Collins, S. (2008). The Hunger Games. Scholastic.<br> [41] Collins S. (2011). Catching Fire. Scholastic.<br> [42] Collins, S. (2011). Mockingjay. Scholastic.<br> [43] Brown, D. (2013). Inferno: a novel. 1st large print ed. New York, Random House Large Print.<br> [44] Bronte, C. (1992). Jane Eyre. Wordsworth Editions.<br> [45] Alcott, Louisa May, 1832-1888. (1953). Little women. Melbourne ; London ; Baltimore :Penguin Books,<br> [46] Golding, W. (2011). Lord of the flies. Faber & Faber.<br> [47] Tolkien, J. R. R. (1991). The lord of the rings. HarperCollins.<br> [48] Stone, I. (1984). Lust for life. 50th anniversary ed. New York, A Plume Book.<br> [49] Dahl, R. (1988). Matilda. Penguin.<br> [50] Moyes, J. (2012). Me Before You. Michael Joseph.<br> [51] Melville, Herman, 1819-1891. (2009). Moby-Dick. [Ashland, Or.] :Blackstone Audio,<br> [52] Eco, U. (2004). The name of the Rose. Vintage Classics.<br> [53] Meyer, S. (2008). New moon. 1st pbk. ed. New York, Little, Brown.<br> [54] Maugham, W. S. 1., & Crossley, S. (2010). Of human bondage. Unabridged. Old Saybrook, CT, Tantor Media.<br> [55] Dickens, C. (1992). Oliver Twist. Wordsworth Editions.<br> [56] Brown, D. (2017). Origin: a novel. First large print edition. [New York], Random House Large Print.<br> [57] Süskind, Patrick. (1986). Perfume : the story of a murderer. New York :A.A. Knopf,<br> [58] Austen, J. (2003). Pride and Prejudice (V. Jones, Ed.). Penguin Classics.<br> [59] Clavell, J. (1986). Shogun. Bantam Doubleday Dell Publishing Group.<br> [60] Eliot, G. (2012). Silas Marner. Penguin Classics.<br> [61] Gaarder, Jostein, 1952-. (1994). Sophie's world : a novel about the history of philosophy. New York :Farrar, Straus and Giroux,<br> [62] Burroughs, Edgar Rice, 1875-1950. (2010). Tarzan of the apes. Oxford [England] ; New York :Oxford University Press,<br> [63] Hardy, T. (2012). Tess of the D’Urbervilles. Penguin Classics.<br> [64] Doyle, A. C. (1950). The adventures of Sherlock Holmes. New York, The Heritage Press.<br> [65] Twain, M. (2011). The adventures of tom Sawyer. William Collins.<br> [66] Wharton, E. (1994). The age of innocence. Wordsworth Editions.<br> [67] Coelho, P. (1995). The alchemist. Thorsons.<br> [68] Boyne, John, 1971-. (2006). 
The boy in the striped pajamas : a fable. New York :David Fickling Books,<br> [69] London, J. (2008). The call of the wild. Puffin Classics.<br> [70] Salinger, J. D. (2001). Catcher in the Rye. Back Bay Books.<br> [71] Lewis, C. S. 1., & Baynes, P. (1994). Prince Caspian. New York, N.Y., HarperTrophy.<br> [72] Lewis, C. S.. (1978). The Chronicles of Narnia : The Lion, the Witch and the Wardrobe . New York: Harper Collins Publisher.<br> [73] Lewis, C. S.. (1980). The Chronicles of Narnia : The Voyage of the Dawn Treader . United States of America: Harper Collins Publisher.<br> [74] Brown, D. (2006). The Da Vinci Code. Corgi Books.<br> [75] Green, J. (2013). The Fault in Our Stars. Penguin Books.<br> [76] Hawkins, Paula. The Girl on the Train. New York :Riverhead Books, a member of Penguin Group (USA), 2015.<br> [77] Larsson, S. (2009). The girl with the dragon tattoo. Quercus Publishing.<br> [78] Lowry, L. (2014). The Giver. HarperCollins.<br> [79] Puzo, M. (2009). The Godfather. Arrow Books.<br> [80] Fitzgerald, F. S. (2019). The great Gatsby. Wordsworth Editions.<br> [81] Tolkien, J. R. R. (2012). The Hobbit. HarperCollins.<br> [82] Hawthorne, Nathaniel, 1804-1864. The House of the Seven Gables. Charlottesville, Va. :University of Virginia Library, 1996.<br> [83] Wells, H. G. 1. (1988). The invisible man. Aerie Books.<br> [84] Brown, D. (2009). The lost symbol: a novel. New York, Doubleday.<br> [85] Britannica, T. Editors of Encyclopaedia (2022, January 10). The Mysteries of Udolpho. Encyclopedia Britannica.<br> [86] Hinton, S. E. (2016). The Outsiders. Penguin Books.<br> [87] Wilde, O. (1992). The picture of Dorian gray. Wordsworth Editions.<br> [88] Follett, K. (1990). Pillars of the earth. New York, N.Y., Signet.<br> [89] Camus, A. (2020). The Plague. Penguin Classics.<br> [90] Hawthorne, N. (2003). Scarlet Letter. Penguin Classics.<br> [91] Townsend, Sue. (2003). The secret diary of Adrian Mole aged 13 3/4 . New York: Harper Collins.<br> [92] Burnett, F. H. (2017). The Secret Garden. Virago Press.<br> [93] Young, W. P. (2008). The Shack. Hodder & Stoughton.<br> [94] Ruiz Zafón, C., & Graves, L. (2005). The shadow of the wind. New York, Penguin Books.<br> [95] Tolkien, J. R. R. (1991). The Silmarillion. HarperCollins.<br> [96] James, H. (1991). The turn of the screw. Dover Publications.<br> [97] Hurston, Z. N. (2018). Their eyes were watching god. Virago Press.<br> [98] Achebe, C. (2006). Things Fall Apart. Penguin Classics.<br> [99] Lee, H. (2010). To kill A mockingbird. Arrow Books.<br> [100] Stevenson, R. L. (2016). Treasure Island. Puffin Classics.<br> [101] Meyer, S. (2007). Twilight. ATOM.<br> [102] Joyce, J. (2010). Ulysses. Wordsworth Editions.<br> [103] Tolstoy, L. (1993). War and peace (L. Maude & A. Maude, Trans.). Wordsworth Editions.<br> [104] Parkins, D., & Adams, R. (2014). Watership Down. Puffin Classics.<br> [105] Bronte, E. (2012). Wuthering Heights. Penguin Classics.<br> [106] Shannon, S. (2023). A day of fallen night. New York, Bloomsbury Publishing.<br> [107] Chung, N. (2023). A living remedy: a memoir. First edition. New York, NY, Ecco.<br> [108] Adebayo, A. (2023). A spell of good things: a novel. First edition. New York, Alfred A. Knopf.<br> [109] Koontz, D. R. 1. (2023). After death. First edition. Seattle, Thomas & Mercer.<br> [110] Patterson, J., & DuBois, B. (2022). Blowback. First edition. New York, Little, Brown and Company.<br> [111] Solomon, R. L. (2023). Business or pleasure. First edition. New York, Berkley Romance.<br> [112] Casati, C. (2023). 
Clytemnestra: a novel. Naperville, Illinois, Sourcebooks Landmark.<br> [113] Smith, T. R. (2023). Cold people: a novel. First Scribner hardcover edition. New York, Scribner.<br> [114] Frumkin, R. (2023). Confidence: a novel. First Simon & Schuster hardcover edition. New York, Simon & Schuster.<br> [115] Ross, R. (2023). Divine rivals: a novel. First edition. New York, Wednesday Books.<br> [116] Fawcett, H. (2023). Emily Wilde's encyclopaedia of faeries. First edition. New York, Del Rey.<br> [117] Rivero, M. (2023). Flores and Miss Paula: a novel. First edition. New York, Ecco, an imprint of HarperCollins Publishers.<br> [118] Henry, E. (2023). Happy place. New York, Berkley.<br> [119] Delury, J. (2023). Hedge: a novel. New York, Zibby Books.<br> [120] Bardugo, L. (2023). Hell bent. First edition. New York, NY, Flatiron Books.<br> [121] Center, K. (2023). Hello stranger. First edition. New York, St. Martin's Press.<br> [122] Hibbert, T. (2023). Highly suspicious and unfairly cute. First edition. New York, Joy Revolution, of Random House Children's Books.<br> [123] Burton, J. (2023). Housebroke. First edition. New York, Berkley Romance.<br> [124] Hendrix, G. (2023). How to sell a haunted house. New York, Berkley.<br> [125] Makkai, R. (2023). I have some questions for you. [New York], Viking.<br> [126] Klune, T. (2023). In the lives of puppets. First edition. New York, Tor Publishing Group.<br> [127] Boyle, L. (2023). In the Silence of Decay. [United States], Lisa Boyle.<br> [128] See, L. (2023). Lady Tan's circle of women: a novel. First Scribner hardcover edition. New York, Scribner.<br> [129] Hazelwood, A. (2023). Love, theoretically. New York, Berkley.<br> [130] George, J. (2023). Maame. First edition. New York, St. Martin's Press.<br> [131] Brown, A. (2023). Perfect little lives. Toronto, ON, Graydon House.<br> [132] Irby, S. (2023). Quietly hostile: essays. New York, Vintage Books, a division of Penguin Random House LLC.<br> [133] Harding, R. (2023). The drowning woman. First edition. New York, Grand Central Publishing.<br> [134] Sittenfeld, C. (2023). Romantic comedy: a novel. First edition. New York, Random House.<br> [135] Davis-Goff, S. (2023). Silent city. First U.S. Edition. New York, Flatiron Books.<br> [136] Moreno-Garcia, S. (2023). Silver nitrate. First edition. New York, Del Rey.<br> [137] Williams, D. (2023). Technically yours. First edition. New York, Berkley Romance.<br> [138] Page, S. (2023). The Book of Beginnings. HarperCollins.<br> [139] Ward, M. (2023). The Darkness Before Them. Orbit.<br> [140] Kantra, V. (2023). The fairytale life of Dorothy Gale. First Edition. New York, Berkley.<br> [141] Ferguson, L. (2023). The fake mate. First edition. New York, Berkley Romance.<br> [142] Whitten, H. (2023). The foxglove king. First Edition. New York, NY, Orbit.<br> [143] Hay, A. (2023). The housekeepers: a novel. Toronto, Ontario, Canada, Graydon House.<br> [144] Johansen, E. (2023). The Kingdom of Sweets: a novel of The Nutcracker. [New York], Dutton.<br> [145] Jackson, L. (2023). The last sinner. First Kensington hardcover edition. New York, NY, Kensington Publishing Corp.<br> [146] Woods, E. (2023). The lost bookshop. Paperback edition. London, One More Chapter.<br> [147] Sager, R. (2023). The only one left: a novel. New York, Dutton.<br> [148] Ryan, R. (2023). The Paris housekeeper. Toronto, Ontario, Canada, Love Inspired.<br> [149] Hashem, S. (2023). The Jasad heir. First edition. New York, NY, Orbit.<br> [150] Leigh, J. (2023). 
The Silver Ladies Do Lunch, [United States], Boldwood Books.<br> [151] Davis, F. (2023). The spectacular: a novel. New York, Dutton.<br> [152] Harvey, K. W. (2023). The summer of songbirds: a novel. First Gallery Books hardcover edition. New York, Gallery Books.<br> [153] Lauren, C. (2023). The True Love Experiment. First Gallery Books hardcover edition. New York, Gallery Books.<br> [154] Lemmie, A. (2023). The wildest sun: a novel. [New York], Dutton.<br> [155] Hatzopoulou, K. (2023). Threads that bind. New York, Razorbill.<br> [156] Harkaway, N. (2023). Titanium noir: a novel. First edition. New York, Alfred A. Knopf.<br> [157] Adams, K. J. (2023). Tonight, I burn. First edition. New York, NY, Orbit.<br> [158] Parks, A. (2023). Two dead wives. Toronto, Mira.<br> [159] Bailey, T. (2023). Unfortunately yours: a novel. First edition. New York, NY, Avon, an imprint of HarperCollinsPublishers.<br> [160] McGinnis, M. W. (2023). USS Lincoln: Mercy Kill. Avenstar Productions<br> [161] Sebastian, C. (2023). We could be so good: a novel. First edition. New York, NY, Avon, an imprint of HarperCollinsPublishers.<br> [162] LeCheminant, D. (2023). What Dreams May Come. Covenant Communications.<br> [163] Marshall, K. A. (2023). What lies in the woods. First edition. New York, Flatiron Books.<br> [164] Kuang, R. F. (2023). Yellowface: a novel. First edition. New York, NY, William Morrow, an imprint of HarperCollinsPublishers.<br> [165] Bayron, K. (2023). You're not supposed to die tonight. New York, Bloomsbury YA.<br> </details>
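As a companion to the Testing Models section above, the following is a minimal, hypothetical sketch of how a single multiple-choice prompt could be assembled from one BookTection row. Only the "_label_" and "_Answer_" columns are described by this card, so the option column names (`Example_A`–`Example_D`), the split handling, and the exact prompt wording are illustrative assumptions; the authors' actual evaluation scripts live in the GitHub repository linked above.

```python
from datasets import load_dataset

dataset = load_dataset("avduarte333/BookTection")
split = next(iter(dataset.values()))  # take whichever split the repo exposes
row = split[0]

# Hypothetical column names for the real excerpt and its 3 paraphrases;
# adjust to the dataset's actual schema before use.
options = {letter: row.get(f"Example_{letter}") for letter in "ABCD"}

lines = ["Which of the following passages is the verbatim excerpt from the book?"]
lines += [f"{letter}. {text}" for letter, text in options.items()]
lines.append("Answer with a single letter.")
print("\n".join(lines))

print("Ground truth:", row.get("Answer"))  # assumed key for the card's "_Answer_" column
```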
avduarte333/BookTection
[ "task_categories:question-answering", "language:en", "license:mit", "arxiv:2402.09910", "region:us" ]
2024-02-02T11:10:57+00:00
{"language": ["en"], "license": "mit", "task_categories": ["question-answering"]}
2024-02-16T09:22:11+00:00
[ "2402.09910" ]
[ "en" ]
TAGS #task_categories-question-answering #language-English #license-mit #arxiv-2402.09910 #region-us
# BookTection Dataset The BookTection dataset serves as a benchmark for the task of detecting pretraining data from Large Language Models. The dataset consists of 165 books. - 60 published in 2023: Non-Training data, "_label_" column = 0. - 105 published before 2022: Training data, "_label_" column = 1. From each book ≈ 34 passages are extracted. Each passage is paraphrased 3 times using the Language Model Claude v2.0. <br> The "_Answer_" column indicates which of the passages is the real excerpt.<br> Passages come in 3 different sizes (small, medium, and large), which are respectively ≈64, 128, and 256 tokens in length. <br> # Testing Models on BookTection Our dataset is designed to be used in a Multiple-Choice-Question-Answering format; nonetheless, it is also compatible with other pretraining data detection methods.<br> Our GitHub repository contains example scripts to evaluate models on our dataset. <br> # Compatibility The Multiple-Choice-Question-Answering task with our dataset is designed to be applied to various models, such as:<br> - LLaMA-2 - Mistral - Mixtral - Chat-GPT (gpt-3.5-turbo-instruct) - GPT-3 (text-davinci-003) - Claude <br> # Loading the Dataset (a loading and evaluation sketch follows this card, after the reference list) <br> # Citation <details> <summary> Book References</summary> [1] Orwell, G. (2021). Nineteen Eighty-Four. Penguin Classics.<br> [2] Martin, George R. R., author. (1996). A game of thrones. New York :Bantam Books,<br> [3] Joyce, J. (1992). A portrait of the artist as a young man. Wordsworth Editions.<br> [4] Dickens, C. (2012). A tale of two cities. Penguin Classics.<br> [5] Twain, M. (2010). The adventures of huckleberry Finn. William Collins.<br> [6] Carroll, L. (2015). Alice’s adventures in wonderland (M. Burstein, Ed.). Princeton University Press.<br> [7] Doerr, A. (2015). All the light we cannot see. Fourth Estate.<br> [8] Christie, A. (2003). And then there were none. HarperCollins.<br> [9] Brown, D. (2000). Angels & demons. 1st large print ed. New York, Random House Large Print.<br> [10] Montgomery, L. M. (2018). Anne of Green Gables. Wordsworth Editions.<br> [11] Melville, H. (2004). Bartleby The Scrivener. Melville House Publishing.<br> [12] Sewell, A. (2018). Black Beauty. Wordsworth Editions.<br> [13] Meyer, S. (2009). Breaking dawn. 1st special ed. London, Little, Brown Children.<br> [14] Fleming, I. (2002). Casino Royale. Penguin Books.<br> [15] Heller, Joseph, 1923-1999. (1961). Catch-22, a novel. New York :The Modern library,<br> [16] Dahl, R. (2016). Charlie and the chocolate factory. Puffin.<br> [17] Dickens, C., & de Gavin, A. (1992). David Copperfield. Wordsworth Editions.<br> [18] Stoker, B. (1993). Dracula. Wordsworth Editions.<br> [19] Herbert, F. (2006). Dune. Hodder Paperback.<br> [20] Meyer, S. (2008). Eclipse. ATOM.<br> [21] Austen, J. (1992). Emma. Wordsworth Editions.<br> [22] Follett, K. (1978). Eye of the needle: a novel. New York, Arbor House.<br> [23] Bradbury, R. (1992). Fahrenheit 451. Del Rey Books.<br> [24] Jong, E. (1973). Fear of flying. [Book club ed.]. New York, Holt, Rinehart and Winston.<br> [25] James, E. L. (2012). Fifty shades of grey. Random House.<br> [26] James, E. L. (2017). Fifty shades darker. Vintage books movie tie-in edition. New York, Vintage Books.<br> [27] Blyton, Enid. (1942). The Famous Five: Five on a treasure island.<br> [28] Shelley, M. (2012). Frankenstein. Penguin Classics.<br> [29] Flynn, G. (2014). Gone Girl. Weidenfeld & Nicolson.<br> [30] Dickens, C. (1992). Great Expectations. Wordsworth Editions.<br> [31] Rowling, J. K. (2014).
Harry potter and the chamber of secrets. Bloomsbury Childrens Books.<br> [32] Rowling, J. K. (2014). Harry potter and the deathly hallows. Bloomsbury Childrens Books.<br> [33] Rowling, J. K. (2014). Harry potter and the goblet of fire. Bloomsbury Childrens Books.<br> [34] Rowling, J. K. (2014). Harry potter and the half-blood prince. Bloomsbury Childrens Books.<br> [35] Rowling, J. K. (2014). Harry potter and the order of the Phoenix. Bloomsbury Childrens Books.<br> [36] Rowling, J. K. (2014). Harry potter and the philosopher’s stone. Bloomsbury Childrens Books.<br> [37] Rowling, J. K. (2014). Harry potter and the prisoner of azkaban. Bloomsbury Childrens Books.<br> [38] Adams, D. (2007). The hitchhiker’s guide to the galaxy. Random House.<br> [39] Doyle, S. A. C. (2012). The hound of the baskervilles. Penguin Classics.<br> [40] Collins, S. (2008). The Hunger Games. Scholastic.<br> [41] Collins S. (2011). Catching Fire. Scholastic.<br> [42] Collins, S. (2011). Mockingjay. Scholastic.<br> [43] Brown, D. (2013). Inferno: a novel. 1st large print ed. New York, Random House Large Print.<br> [44] Bronte, C. (1992). Jane Eyre. Wordsworth Editions.<br> [45] Alcott, Louisa May, 1832-1888. (1953). Little women. Melbourne ; London ; Baltimore :Penguin Books,<br> [46] Golding, W. (2011). Lord of the flies. Faber & Faber.<br> [47] Tolkien, J. R. R. (1991). The lord of the rings. HarperCollins.<br> [48] Stone, I. (1984). Lust for life. 50th anniversary ed. New York, A Plume Book.<br> [49] Dahl, R. (1988). Matilda. Penguin.<br> [50] Moyes, J. (2012). Me Before You. Michael Joseph.<br> [51] Melville, Herman, 1819-1891. (2009). Moby-Dick. [Ashland, Or.] :Blackstone Audio,<br> [52] Eco, U. (2004). The name of the Rose. Vintage Classics.<br> [53] Meyer, S. (2008). New moon. 1st pbk. ed. New York, Little, Brown.<br> [54] Maugham, W. S. 1., & Crossley, S. (2010). Of human bondage. Unabridged. Old Saybrook, CT, Tantor Media.<br> [55] Dickens, C. (1992). Oliver Twist. Wordsworth Editions.<br> [56] Brown, D. (2017). Origin: a novel. First large print edition. [New York], Random House Large Print.<br> [57] Süskind, Patrick. (1986). Perfume : the story of a murderer. New York :A.A. Knopf,<br> [58] Austen, J. (2003). Pride and Prejudice (V. Jones, Ed.). Penguin Classics.<br> [59] Clavell, J. (1986). Shogun. Bantam Doubleday Dell Publishing Group.<br> [60] Eliot, G. (2012). Silas Marner. Penguin Classics.<br> [61] Gaarder, Jostein, 1952-. (1994). Sophie's world : a novel about the history of philosophy. New York :Farrar, Straus and Giroux,<br> [62] Burroughs, Edgar Rice, 1875-1950. (2010). Tarzan of the apes. Oxford [England] ; New York :Oxford University Press,<br> [63] Hardy, T. (2012). Tess of the D’Urbervilles. Penguin Classics.<br> [64] Doyle, A. C. (1950). The adventures of Sherlock Holmes. New York, The Heritage Press.<br> [65] Twain, M. (2011). The adventures of tom Sawyer. William Collins.<br> [66] Wharton, E. (1994). The age of innocence. Wordsworth Editions.<br> [67] Coelho, P. (1995). The alchemist. Thorsons.<br> [68] Boyne, John, 1971-. (2006). The boy in the striped pajamas : a fable. New York :David Fickling Books,<br> [69] London, J. (2008). The call of the wild. Puffin Classics.<br> [70] Salinger, J. D. (2001). Catcher in the Rye. Back Bay Books.<br> [71] Lewis, C. S. 1., & Baynes, P. (1994). Prince Caspian. New York, N.Y., HarperTrophy.<br> [72] Lewis, C. S.. (1978). The Chronicles of Narnia : The Lion, the Witch and the Wardrobe . New York: Harper Collins Publisher.<br> [73] Lewis, C. S.. (1980). 
The Chronicles of Narnia : The Voyage of the Dawn Treader . United States of America: Harper Collins Publisher.<br> [74] Brown, D. (2006). The Da Vinci Code. Corgi Books.<br> [75] Green, J. (2013). The Fault in Our Stars. Penguin Books.<br> [76] Hawkins, Paula. The Girl on the Train. New York :Riverhead Books, a member of Penguin Group (USA), 2015.<br> [77] Larsson, S. (2009). The girl with the dragon tattoo. Quercus Publishing.<br> [78] Lowry, L. (2014). The Giver. HarperCollins.<br> [79] Puzo, M. (2009). The Godfather. Arrow Books.<br> [80] Fitzgerald, F. S. (2019). The great Gatsby. Wordsworth Editions.<br> [81] Tolkien, J. R. R. (2012). The Hobbit. HarperCollins.<br> [82] Hawthorne, Nathaniel, 1804-1864. The House of the Seven Gables. Charlottesville, Va. :University of Virginia Library, 1996.<br> [83] Wells, H. G. 1. (1988). The invisible man. Aerie Books.<br> [84] Brown, D. (2009). The lost symbol: a novel. New York, Doubleday.<br> [85] Britannica, T. Editors of Encyclopaedia (2022, January 10). The Mysteries of Udolpho. Encyclopedia Britannica.<br> [86] Hinton, S. E. (2016). The Outsiders. Penguin Books.<br> [87] Wilde, O. (1992). The picture of Dorian gray. Wordsworth Editions.<br> [88] Follett, K. (1990). Pillars of the earth. New York, N.Y., Signet.<br> [89] Camus, A. (2020). The Plague. Penguin Classics.<br> [90] Hawthorne, N. (2003). Scarlet Letter. Penguin Classics.<br> [91] Townsend, Sue. (2003). The secret diary of Adrian Mole aged 13 3/4 . New York: Harper Collins.<br> [92] Burnett, F. H. (2017). The Secret Garden. Virago Press.<br> [93] Young, W. P. (2008). The Shack. Hodder & Stoughton.<br> [94] Ruiz Zafón, C., & Graves, L. (2005). The shadow of the wind. New York, Penguin Books.<br> [95] Tolkien, J. R. R. (1991). The Silmarillion. HarperCollins.<br> [96] James, H. (1991). The turn of the screw. Dover Publications.<br> [97] Hurston, Z. N. (2018). Their eyes were watching god. Virago Press.<br> [98] Achebe, C. (2006). Things Fall Apart. Penguin Classics.<br> [99] Lee, H. (2010). To kill A mockingbird. Arrow Books.<br> [100] Stevenson, R. L. (2016). Treasure Island. Puffin Classics.<br> [101] Meyer, S. (2007). Twilight. ATOM.<br> [102] Joyce, J. (2010). Ulysses. Wordsworth Editions.<br> [103] Tolstoy, L. (1993). War and peace (L. Maude & A. Maude, Trans.). Wordsworth Editions.<br> [104] Parkins, D., & Adams, R. (2014). Watership Down. Puffin Classics.<br> [105] Bronte, E. (2012). Wuthering Heights. Penguin Classics.<br> [106] Shannon, S. (2023). A day of fallen night. New York, Bloomsbury Publishing.<br> [107] Chung, N. (2023). A living remedy: a memoir. First edition. New York, NY, Ecco.<br> [108] Adebayo, A. (2023). A spell of good things: a novel. First edition. New York, Alfred A. Knopf.<br> [109] Koontz, D. R. 1. (2023). After death. First edition. Seattle, Thomas & Mercer.<br> [110] Patterson, J., & DuBois, B. (2022). Blowback. First edition. New York, Little, Brown and Company.<br> [111] Solomon, R. L. (2023). Business or pleasure. First edition. New York, Berkley Romance.<br> [112] Casati, C. (2023). Clytemnestra: a novel. Naperville, Illinois, Sourcebooks Landmark.<br> [113] Smith, T. R. (2023). Cold people: a novel. First Scribner hardcover edition. New York, Scribner.<br> [114] Frumkin, R. (2023). Confidence: a novel. First Simon & Schuster hardcover edition. New York, Simon & Schuster.<br> [115] Ross, R. (2023). Divine rivals: a novel. First edition. New York, Wednesday Books.<br> [116] Fawcett, H. (2023). Emily Wilde's encyclopaedia of faeries. First edition. 
New York, Del Rey.<br> [117] Rivero, M. (2023). Flores and Miss Paula: a novel. First edition. New York, Ecco, an imprint of HarperCollins Publishers.<br> [118] Henry, E. (2023). Happy place. New York, Berkley.<br> [119] Delury, J. (2023). Hedge: a novel. New York, Zibby Books.<br> [120] Bardugo, L. (2023). Hell bent. First edition. New York, NY, Flatiron Books.<br> [121] Center, K. (2023). Hello stranger. First edition. New York, St. Martin's Press.<br> [122] Hibbert, T. (2023). Highly suspicious and unfairly cute. First edition. New York, Joy Revolution, of Random House Children's Books.<br> [123] Burton, J. (2023). Housebroke. First edition. New York, Berkley Romance.<br> [124] Hendrix, G. (2023). How to sell a haunted house. New York, Berkley.<br> [125] Makkai, R. (2023). I have some questions for you. [New York], Viking.<br> [126] Klune, T. (2023). In the lives of puppets. First edition. New York, Tor Publishing Group.<br> [127] Boyle, L. (2023). In the Silence of Decay. [United States], Lisa Boyle.<br> [128] See, L. (2023). Lady Tan's circle of women: a novel. First Scribner hardcover edition. New York, Scribner.<br> [129] Hazelwood, A. (2023). Love, theoretically. New York, Berkley.<br> [130] George, J. (2023). Maame. First edition. New York, St. Martin's Press.<br> [131] Brown, A. (2023). Perfect little lives. Toronto, ON, Graydon House.<br> [132] Irby, S. (2023). Quietly hostile: essays. New York, Vintage Books, a division of Penguin Random House LLC.<br> [133] Harding, R. (2023). The drowning woman. First edition. New York, Grand Central Publishing.<br> [134] Sittenfeld, C. (2023). Romantic comedy: a novel. First edition. New York, Random House.<br> [135] Davis-Goff, S. (2023). Silent city. First U.S. Edition. New York, Flatiron Books.<br> [136] Moreno-Garcia, S. (2023). Silver nitrate. First edition. New York, Del Rey.<br> [137] Williams, D. (2023). Technically yours. First edition. New York, Berkley Romance.<br> [138] Page, S. (2023). The Book of Beginnings. HarperCollins.<br> [139] Ward, M. (2023). The Darkness Before Them. Orbit.<br> [140] Kantra, V. (2023). The fairytale life of Dorothy Gale. First Edition. New York, Berkley.<br> [141] Ferguson, L. (2023). The fake mate. First edition. New York, Berkley Romance.<br> [142] Whitten, H. (2023). The foxglove king. First Edition. New York, NY, Orbit.<br> [143] Hay, A. (2023). The housekeepers: a novel. Toronto, Ontario, Canada, Graydon House.<br> [144] Johansen, E. (2023). The Kingdom of Sweets: a novel of The Nutcracker. [New York], Dutton.<br> [145] Jackson, L. (2023). The last sinner. First Kensington hardcover edition. New York, NY, Kensington Publishing Corp.<br> [146] Woods, E. (2023). The lost bookshop. Paperback edition. London, One More Chapter.<br> [147] Sager, R. (2023). The only one left: a novel. New York, Dutton.<br> [148] Ryan, R. (2023). The Paris housekeeper. Toronto, Ontario, Canada, Love Inspired.<br> [149] Hashem, S. (2023). The Jasad heir. First edition. New York, NY, Orbit.<br> [150] Leigh, J. (2023). The Silver Ladies Do Lunch, [United States], Boldwood Books.<br> [151] Davis, F. (2023). The spectacular: a novel. New York, Dutton.<br> [152] Harvey, K. W. (2023). The summer of songbirds: a novel. First Gallery Books hardcover edition. New York, Gallery Books.<br> [153] Lauren, C. (2023). The True Love Experiment. First Gallery Books hardcover edition. New York, Gallery Books.<br> [154] Lemmie, A. (2023). The wildest sun: a novel. [New York], Dutton.<br> [155] Hatzopoulou, K. (2023). Threads that bind. 
New York, Razorbill.<br> [156] Harkaway, N. (2023). Titanium noir: a novel. First edition. New York, Alfred A. Knopf.<br> [157] Adams, K. J. (2023). Tonight, I burn. First edition. New York, NY, Orbit.<br> [158] Parks, A. (2023). Two dead wives. Toronto, Mira.<br> [159] Bailey, T. (2023). Unfortunately yours: a novel. First edition. New York, NY, Avon, an imprint of HarperCollinsPublishers.<br> [160] McGinnis, M. W. (2023). USS Lincoln: Mercy Kill. Avenstar Productions<br> [161] Sebastian, C. (2023). We could be so good: a novel. First edition. New York, NY, Avon, an imprint of HarperCollinsPublishers.<br> [162] LeCheminant, D. (2023). What Dreams May Come. Covenant Communications.<br> [163] Marshall, K. A. (2023). What lies in the woods. First edition. New York, Flatiron Books.<br> [164] Kuang, R. F. (2023). Yellowface: a novel. First edition. New York, NY, William Morrow, an imprint of HarperCollinsPublishers.<br> [165] Bayron, K. (2023). You're not supposed to die tonight. New York, Bloomsbury YA.<br> </details>
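The card's "Loading the Dataset" section above is empty in this dump, so here is a minimal loading sketch. The repository id comes from this record's metadata; the column names (`label`, `Answer`), the `train` split, and the single default configuration are assumptions inferred from the card's description, not a verified schema.

```python
from datasets import load_dataset

# Minimal loading sketch (assumptions: a single default configuration and a
# "train" split; if the three passage sizes are separate configs, pass the
# config name as the second argument).
ds = load_dataset("avduarte333/BookTection", split="train")

example = ds[0]
# Assumed fields, per the card's description:
#   "label"  -> 1 for pre-2022 (training) books, 0 for 2023 (non-training) books
#   "Answer" -> which of the candidate passages is the real excerpt
print(example.get("label"), example.get("Answer"))
```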
[ "# BookTection Dataset\nThe BookTection dataset serves as a benchmark designed for the task of detecting pretraining data from Large Language models.\n\nThe dataset consists of 165 books. \n- 60 published in 2023: Non-Training data, \"_label_\" column = 0.\n- 105 published before 2022: Training data, \"_label_\" column = 1.\n\nFrom each book ≈ 34 passages are extracted. Each passage is paraphrased 3 times using the Language Model Claude v2.0. <br>\nThe \"_Answer_\" column indicates which of the passages is the real excerpt.<br>\nPassages come in 3 different sizes (small, medium and large) which aim to be respectively ≈(64, 128 and 256) tokens in length.\n\n<br>", "# Testing Models on BookTection\nOur dataset is planned to be used on a Multiple-Choice-Question-Answering format. Nonetheless, it is compatible to be used with other pretraining data detection methods.<br>\nOur GitHub repository contains example scripts to evaluate models on our dataset.\n\n<br>", "# Compatibility\nThe Multiple-Choice-Question-Answering task with our Dataset is designed to be applied to various models, such as:<br>\n- LLaMA-2\n- Mistral\n- Mixtral\n- Chat-GPT (gpt-3.5-turbo-instruct)\n- GPT-3 (text-davinci-003)\n- Claude \n\n<br>", "# Loading the Dataset\n\n\n\n<br>", "# Citation\n\n\n\n<details>\n <summary> Book References</summary>\n [1] Orwell, G. (2021). Nineteen Eighty-Four. Penguin Classics.<br>\n [2] Martin, George R. R., author. (1996). A game of thrones. New York :Bantam Books,<br>\n [3] Joyce, J. (1992). A portrait of the artist as a young man. Wordsworth Editions.<br>\n [4] Dickens, C. (2012). A tale of two cities. Penguin Classics.<br>\n [5] Twain, M. (2010). The adventures of huckleberry Finn. William Collins.<br>\n [6] Carroll, L. (2015). Alice’s adventures in wonderland (M. Burstein, Ed.). Princeton University Press.<br>\n [7] Doerr, A. (2015). All the light we cannot see. Fourth Estate.<br>\n [8] Christie, A. (2003). And then there were none. HarperCollins.<br>\n [9] Brown, D. (2000). Angels & demons. 1st large print ed. New York, Random House Large Print.<br>\n [10] Montgomery, L. M. (2018). Anne of Green Gables. Wordsworth Editions.<br>\n [11] Melville, H. (2004). Bartleby The Scrivener. Melville House Publishing.<br>\n [12] Sewell, A. (2018). Black Beauty. Wordsworth Editions.<br>\n [13] Meyer, S. (2009). Breaking dawn. 1st special ed. London, Little, Brown Children.<br>\n [14] Ian, F. (2002). Casino Royale. Penguin Books.<br>\n [15] Heller, Joseph, 1798-1849. (1961). Catch-22, a novel. New York :The Modern library,<br>\n [16] Dahl, R. (2016). Charlie and the chocolate factory. Puffin.<br>\n [17] Dickens, C., & de Gavin, A. (1992). David Copperfield. Wordsworth Editions.<br>\n [18] Stoker, B. (1993). Dracula. Wordsworth Editions.<br>\n [19] Herbert, F. (2006). Dune. Hodder Paperback.<br>\n [20] Meyer, S. (2008). Eclipse. ATOM.<br>\n [21] Austen, J. (1992). Emma. Wordsworth Editions.<br>\n [22] Follett, K. (1978). Eye of the needle: a novel. New York, Arbor House.<br>\n [23] Bradbury, R. (1992). Fahrenheit 451. Del Rey Books.<br>\n [24] Jong, E. (1973). Fear of flying. [Book club ed.]. New York, Holt, Rinehart and Winston.<br>\n [25] James, E. L. (2012). Fifty shades of grey. Random House.<br>\n [26] James, E. L. (2017). Fifty shades darker. Vintage books movie tie-in edition. New York, Vintage Books.<br>\n [27] Blyton, Enid. (1942). The Famous Five: Five on a treasure island . : .<br>\n [28] Shelley, M. (2012). Frankenstein. Penguin Classics.<br>\n [29] Flynn, G. (2014). Gone Girl. 
Weidenfeld & Nicolson.<br>\n [30] Dickens, C. (1992). Great Expectations. Wordsworth Editions.<br>\n [31] Rowling, J. K. (2014). Harry potter and the chamber of secrets. Bloomsbury Childrens Books.<br>\n [32] Rowling, J. K. (2014). Harry potter and the deathly hallows. Bloomsbury Childrens Books.<br>\n [33] Rowling, J. K. (2014). Harry potter and the goblet of fire. Bloomsbury Childrens Books.<br>\n [34] Rowling, J. K. (2014). Harry potter and the half-blood prince. Bloomsbury Childrens Books.<br>\n [35] Rowling, J. K. (2014). Harry potter and the order of the Phoenix. Bloomsbury Childrens Books.<br>\n [36] Rowling, J. K. (2014). Harry potter and the philosopher’s stone. Bloomsbury Childrens Books.<br>\n [37] Rowling, J. K. (2014). Harry potter and the prisoner of azkaban. Bloomsbury Childrens Books.<br>\n [38] Adams, D. (2007). The hitchhiker’s guide to the galaxy. Random House.<br>\n [39] Doyle, S. A. C. (2012). The hound of the baskervilles. Penguin Classics.<br>\n [40] Collins, S. (2008). The Hunger Games. Scholastic.<br>\n [41] Collins S. (2011). Catching Fire. Scholastic.<br>\n [42] Collins, S. (2011). Mockingjay. Scholastic.<br>\n [43] Brown, D. (2013). Inferno: a novel. 1st large print ed. New York, Random House Large Print.<br>\n [44] Bronte, C. (1992). Jane Eyre. Wordsworth Editions.<br>\n [45] Alcott, Louisa May, 1832-1888. (1953). Little women. Melbourne ; London ; Baltimore :Penguin Books,<br>\n [46] Golding, W. (2011). Lord of the flies. Faber & Faber.<br>\n [47] Tolkien, J. R. R. (1991). The lord of the rings. HarperCollins.<br>\n [48] Stone, I. (1984). Lust for life. 50th anniversary ed. New York, A Plume Book.<br>\n [49] Dahl, R. (1988). Matilda. Penguin.<br>\n [50] Moyes, J. (2012). Me Before You. Michael Joseph.<br>\n [51] Melville, Herman, 1819-1891. (2009). Moby-Dick. [Ashland, Or.] :Blackstone Audio,<br>\n [52] Eco, U. (2004). The name of the Rose. Vintage Classics.<br>\n [53] Meyer, S. (2008). New moon. 1st pbk. ed. New York, Little, Brown.<br>\n [54] Maugham, W. S. 1., & Crossley, S. (2010). Of human bondage. Unabridged. Old Saybrook, CT, Tantor Media.<br>\n [55] Dickens, C. (1992). Oliver Twist. Wordsworth Editions.<br>\n [56] Brown, D. (2017). Origin: a novel. First large print edition. [New York], Random House Large Print.<br>\n [57] Süskind, Patrick. (1986). Perfume : the story of a murderer. New York :A.A. Knopf,<br>\n [58] Austen, J. (2003). Pride and Prejudice (V. Jones, Ed.). Penguin Classics.<br>\n [59] Clavell, J. (1986). Shogun. Bantam Doubleday Dell Publishing Group.<br>\n [60] Eliot, G. (2012). Silas Marner. Penguin Classics.<br>\n [61] Gaarder, Jostein, 1952-. (1994). Sophie's world : a novel about the history of philosophy. New York :Farrar, Straus and Giroux,<br>\n [62] Burroughs, Edgar Rice, 1875-1950. (2010). Tarzan of the apes. Oxford [England] ; New York :Oxford University Press,<br>\n [63] Hardy, T. (2012). Tess of the D’Urbervilles. Penguin Classics.<br>\n [64] Doyle, A. C. (1950). The adventures of Sherlock Holmes. New York, The Heritage Press.<br>\n [65] Twain, M. (2011). The adventures of tom Sawyer. William Collins.<br>\n [66] Wharton, E. (1994). The age of innocence. Wordsworth Editions.<br>\n [67] Coelho, P. (1995). The alchemist. Thorsons.<br>\n [68] Boyne, John, 1971-. (2006). The boy in the striped pajamas : a fable. New York :David Fickling Books,<br>\n [69] London, J. (2008). The call of the wild. Puffin Classics.<br>\n [70] Salinger, J. D. (2001). Catcher in the Rye. Back Bay Books.<br>\n [71] Lewis, C. S. 1., & Baynes, P. (1994). 
Prince Caspian. New York, N.Y., HarperTrophy.<br>\n [72] Lewis, C. S.. (1978). The Chronicles of Narnia : The Lion, the Witch and the Wardrobe . New York: Harper Collins Publisher.<br>\n [73] Lewis, C. S.. (1980). The Chronicles of Narnia : The Voyage of the Dawn Treader . United States of America: Harper Collins Publisher.<br>\n [74] Brown, D. (2006). The Da Vinci Code. Corgi Books.<br>\n [75] Green, J. (2013). The Fault in Our Stars. Penguin Books.<br>\n [76] Hawkins, Paula. The Girl on the Train. New York :Riverhead Books, a member of Penguin Group (USA), 2015.<br>\n [77] Larsson, S. (2009). The girl with the dragon tattoo. Quercus Publishing.<br>\n [78] Lowry, L. (2014). The Giver. HarperCollins.<br>\n [79] Puzo, M. (2009). The Godfather. Arrow Books.<br>\n [80] Fitzgerald, F. S. (2019). The great Gatsby. Wordsworth Editions.<br>\n [81] Tolkien, J. R. R. (2012). The Hobbit. HarperCollins.<br>\n [82] Hawthorne, Nathaniel, 1804-1864. The House of the Seven Gables. Charlottesville, Va. :University of Virginia Library, 1996.<br>\n [83] Wells, H. G. 1. (1988). The invisible man. Aerie Books.<br>\n [84] Brown, D. (2009). The lost symbol: a novel. New York, Doubleday.<br>\n [85] Britannica, T. Editors of Encyclopaedia (2022, January 10). The Mysteries of Udolpho. Encyclopedia Britannica.<br>\n [86] Hinton, S. E. (2016). The Outsiders. Penguin Books.<br>\n [87] Wilde, O. (1992). The picture of Dorian gray. Wordsworth Editions.<br>\n [88] Follett, K. (1990). Pillars of the earth. New York, N.Y., Signet.<br>\n [89] Camus, A. (2020). The Plague. Penguin Classics.<br>\n [90] Hawthorne, N. (2003). Scarlet Letter. Penguin Classics.<br>\n [91] Townsend, Sue. (2003). The secret diary of Adrian Mole aged 13 3/4 . New York: Harper Collins.<br>\n [92] Burnett, F. H. (2017). The Secret Garden. Virago Press.<br>\n [93] Young, W. P. (2008). The Shack. Hodder & Stoughton.<br>\n [94] Ruiz Zafón, C., & Graves, L. (2005). The shadow of the wind. New York, Penguin Books.<br>\n [95] Tolkien, J. R. R. (1991). The Silmarillion. HarperCollins.<br>\n [96] James, H. (1991). The turn of the screw. Dover Publications.<br>\n [97] Hurston, Z. N. (2018). Their eyes were watching god. Virago Press.<br>\n [98] Achebe, C. (2006). Things Fall Apart. Penguin Classics.<br>\n [99] Lee, H. (2010). To kill A mockingbird. Arrow Books.<br>\n [100] Stevenson, R. L. (2016). Treasure Island. Puffin Classics.<br>\n [101] Meyer, S. (2007). Twilight. ATOM.<br>\n [102] Joyce, J. (2010). Ulysses. Wordsworth Editions.<br>\n [103] Tolstoy, L. (1993). War and peace (L. Maude & A. Maude, Trans.). Wordsworth Editions.<br>\n [104] Parkins, D., & Adams, R. (2014). Watership Down. Puffin Classics.<br>\n [105] Bronte, E. (2012). Wuthering Heights. Penguin Classics.<br>\n [106] Shannon, S. (2023). A day of fallen night. New York, Bloomsbury Publishing.<br>\n [107] Chung, N. (2023). A living remedy: a memoir. First edition. New York, NY, Ecco.<br>\n [108] Adebayo, A. (2023). A spell of good things: a novel. First edition. New York, Alfred A. Knopf.<br>\n [109] Koontz, D. R. 1. (2023). After death. First edition. Seattle, Thomas & Mercer.<br>\n [110] Patterson, J., & DuBois, B. (2022). Blowback. First edition. New York, Little, Brown and Company.<br>\n [111] Solomon, R. L. (2023). Business or pleasure. First edition. New York, Berkley Romance.<br>\n [112] Casati, C. (2023). Clytemnestra: a novel. Naperville, Illinois, Sourcebooks Landmark.<br>\n [113] Smith, T. R. (2023). Cold people: a novel. First Scribner hardcover edition. 
New York, Scribner.<br>\n [114] Frumkin, R. (2023). Confidence: a novel. First Simon & Schuster hardcover edition. New York, Simon & Schuster.<br>\n [115] Ross, R. (2023). Divine rivals: a novel. First edition. New York, Wednesday Books.<br>\n [116] Fawcett, H. (2023). Emily Wilde's encyclopaedia of faeries. First edition. New York, Del Rey.<br>\n [117] Rivero, M. (2023). Flores and Miss Paula: a novel. First edition. New York, Ecco, an imprint of HarperCollins Publishers.<br>\n [118] Henry, E. (2023). Happy place. New York, Berkley.<br>\n [119] Delury, J. (2023). Hedge: a novel. New York, Zibby Books.<br>\n [120] Bardugo, L. (2023). Hell bent. First edition. New York, NY, Flatiron Books.<br>\n [121] Center, K. (2023). Hello stranger. First edition. New York, St. Martin's Press.<br>\n [122] Hibbert, T. (2023). Highly suspicious and unfairly cute. First edition. New York, Joy Revolution, of Random House Children's Books.<br>\n [123] Burton, J. (2023). Housebroke. First edition. New York, Berkley Romance.<br>\n [124] Hendrix, G. (2023). How to sell a haunted house. New York, Berkley.<br>\n [125] Makkai, R. (2023). I have some questions for you. [New York], Viking.<br>\n [126] Klune, T. (2023). In the lives of puppets. First edition. New York, Tor Publishing Group.<br>\n [127] Boyle, L. (2023). In the Silence of Decay. [United States], Lisa Boyle.<br>\n [128] See, L. (2023). Lady Tan's circle of women: a novel. First Scribner hardcover edition. New York, Scribner.<br>\n [129] Hazelwood, A. (2023). Love, theoretically. New York, Berkley.<br>\n [130] George, J. (2023). Maame. First edition. New York, St. Martin's Press.<br>\n [131] Brown, A. (2023). Perfect little lives. Toronto, ON, Graydon House.<br>\n [132] Irby, S. (2023). Quietly hostile: essays. New York, Vintage Books, a division of Penguin Random House LLC.<br>\n [133] Harding, R. (2023). The drowning woman. First edition. New York, Grand Central Publishing.<br>\n [134] Sittenfeld, C. (2023). Romantic comedy: a novel. First edition. New York, Random House.<br>\n [135] Davis-Goff, S. (2023). Silent city. First U.S. Edition. New York, Flatiron Books.<br>\n [136] Moreno-Garcia, S. (2023). Silver nitrate. First edition. New York, Del Rey.<br>\n [137] Williams, D. (2023). Technically yours. First edition. New York, Berkley Romance.<br>\n [138] Page, S. (2023). The Book of Beginnings. HarperCollins.<br>\n [139] Ward, M. (2023). The Darkness Before Them. Orbit.<br>\n [140] Kantra, V. (2023). The fairytale life of Dorothy Gale. First Edition. New York, Berkley.<br>\n [141] Ferguson, L. (2023). The fake mate. First edition. New York, Berkley Romance.<br>\n [142] Whitten, H. (2023). The foxglove king. First Edition. New York, NY, Orbit.<br>\n [143] Hay, A. (2023). The housekeepers: a novel. Toronto, Ontario, Canada, Graydon House.<br>\n [144] Johansen, E. (2023). The Kingdom of Sweets: a novel of The Nutcracker. [New York], Dutton.<br>\n [145] Jackson, L. (2023). The last sinner. First Kensington hardcover edition. New York, NY, Kensington Publishing Corp.<br>\n [146] Woods, E. (2023). The lost bookshop. Paperback edition. London, One More Chapter.<br>\n [147] Sager, R. (2023). The only one left: a novel. New York, Dutton.<br>\n [148] Ryan, R. (2023). The Paris housekeeper. Toronto, Ontario, Canada, Love Inspired.<br>\n [149] Hashem, S. (2023). The Jasad heir. First edition. New York, NY, Orbit.<br>\n [150] Leigh, J. (2023). The Silver Ladies Do Lunch, [United States], Boldwood Books.<br>\n [151] Davis, F. (2023). The spectacular: a novel. 
New York, Dutton.<br>\n [152] Harvey, K. W. (2023). The summer of songbirds: a novel. First Gallery Books hardcover edition. New York, Gallery Books.<br>\n [153] Lauren, C. (2023). The True Love Experiment. First Gallery Books hardcover edition. New York, Gallery Books.<br>\n [154] Lemmie, A. (2023). The wildest sun: a novel. [New York], Dutton.<br>\n [155] Hatzopoulou, K. (2023). Threads that bind. New York, Razorbill.<br>\n [156] Harkaway, N. (2023). Titanium noir: a novel. First edition. New York, Alfred A. Knopf.<br>\n [157] Adams, K. J. (2023). Tonight, I burn. First edition. New York, NY, Orbit.<br>\n [158] Parks, A. (2023). Two dead wives. Toronto, Mira.<br>\n [159] Bailey, T. (2023). Unfortunately yours: a novel. First edition. New York, NY, Avon, an imprint of HarperCollinsPublishers.<br>\n [160] McGinnis, M. W. (2023). USS Lincoln: Mercy Kill. Avenstar Productions<br>\n [161] Sebastian, C. (2023). We could be so good: a novel. First edition. New York, NY, Avon, an imprint of HarperCollinsPublishers.<br>\n [162] LeCheminant, D. (2023). What Dreams May Come. Covenant Communications.<br>\n [163] Marshall, K. A. (2023). What lies in the woods. First edition. New York, Flatiron Books.<br>\n [164] Kuang, R. F. (2023). Yellowface: a novel. First edition. New York, NY, William Morrow, an imprint of HarperCollinsPublishers.<br>\n [165] Bayron, K. (2023). You're not supposed to die tonight. New York, Bloomsbury YA.<br>\n</details>" ]
[ "TAGS\n#task_categories-question-answering #language-English #license-mit #arxiv-2402.09910 #region-us \n", "# BookTection Dataset\nThe BookTection dataset serves as a benchmark designed for the task of detecting pretraining data from Large Language models.\n\nThe dataset consists of 165 books. \n- 60 published in 2023: Non-Training data, \"_label_\" column = 0.\n- 105 published before 2022: Training data, \"_label_\" column = 1.\n\nFrom each book ≈ 34 passages are extracted. Each passage is paraphrased 3 times using the Language Model Claude v2.0. <br>\nThe \"_Answer_\" column indicates which of the passages is the real excerpt.<br>\nPassages come in 3 different sizes (small, medium and large) which aim to be respectively ≈(64, 128 and 256) tokens in length.\n\n<br>", "# Testing Models on BookTection\nOur dataset is planned to be used on a Multiple-Choice-Question-Answering format. Nonetheless, it is compatible to be used with other pretraining data detection methods.<br>\nOur GitHub repository contains example scripts to evaluate models on our dataset.\n\n<br>", "# Compatibility\nThe Multiple-Choice-Question-Answering task with our Dataset is designed to be applied to various models, such as:<br>\n- LLaMA-2\n- Mistral\n- Mixtral\n- Chat-GPT (gpt-3.5-turbo-instruct)\n- GPT-3 (text-davinci-003)\n- Claude \n\n<br>", "# Loading the Dataset\n\n\n\n<br>", "# Citation\n\n\n\n<details>\n <summary> Book References</summary>\n [1] Orwell, G. (2021). Nineteen Eighty-Four. Penguin Classics.<br>\n [2] Martin, George R. R., author. (1996). A game of thrones. New York :Bantam Books,<br>\n [3] Joyce, J. (1992). A portrait of the artist as a young man. Wordsworth Editions.<br>\n [4] Dickens, C. (2012). A tale of two cities. Penguin Classics.<br>\n [5] Twain, M. (2010). The adventures of huckleberry Finn. William Collins.<br>\n [6] Carroll, L. (2015). Alice’s adventures in wonderland (M. Burstein, Ed.). Princeton University Press.<br>\n [7] Doerr, A. (2015). All the light we cannot see. Fourth Estate.<br>\n [8] Christie, A. (2003). And then there were none. HarperCollins.<br>\n [9] Brown, D. (2000). Angels & demons. 1st large print ed. New York, Random House Large Print.<br>\n [10] Montgomery, L. M. (2018). Anne of Green Gables. Wordsworth Editions.<br>\n [11] Melville, H. (2004). Bartleby The Scrivener. Melville House Publishing.<br>\n [12] Sewell, A. (2018). Black Beauty. Wordsworth Editions.<br>\n [13] Meyer, S. (2009). Breaking dawn. 1st special ed. London, Little, Brown Children.<br>\n [14] Ian, F. (2002). Casino Royale. Penguin Books.<br>\n [15] Heller, Joseph, 1798-1849. (1961). Catch-22, a novel. New York :The Modern library,<br>\n [16] Dahl, R. (2016). Charlie and the chocolate factory. Puffin.<br>\n [17] Dickens, C., & de Gavin, A. (1992). David Copperfield. Wordsworth Editions.<br>\n [18] Stoker, B. (1993). Dracula. Wordsworth Editions.<br>\n [19] Herbert, F. (2006). Dune. Hodder Paperback.<br>\n [20] Meyer, S. (2008). Eclipse. ATOM.<br>\n [21] Austen, J. (1992). Emma. Wordsworth Editions.<br>\n [22] Follett, K. (1978). Eye of the needle: a novel. New York, Arbor House.<br>\n [23] Bradbury, R. (1992). Fahrenheit 451. Del Rey Books.<br>\n [24] Jong, E. (1973). Fear of flying. [Book club ed.]. New York, Holt, Rinehart and Winston.<br>\n [25] James, E. L. (2012). Fifty shades of grey. Random House.<br>\n [26] James, E. L. (2017). Fifty shades darker. Vintage books movie tie-in edition. New York, Vintage Books.<br>\n [27] Blyton, Enid. (1942). The Famous Five: Five on a treasure island . 
: .<br>\n [28] Shelley, M. (2012). Frankenstein. Penguin Classics.<br>\n [29] Flynn, G. (2014). Gone Girl. Weidenfeld & Nicolson.<br>\n [30] Dickens, C. (1992). Great Expectations. Wordsworth Editions.<br>\n [31] Rowling, J. K. (2014). Harry potter and the chamber of secrets. Bloomsbury Childrens Books.<br>\n [32] Rowling, J. K. (2014). Harry potter and the deathly hallows. Bloomsbury Childrens Books.<br>\n [33] Rowling, J. K. (2014). Harry potter and the goblet of fire. Bloomsbury Childrens Books.<br>\n [34] Rowling, J. K. (2014). Harry potter and the half-blood prince. Bloomsbury Childrens Books.<br>\n [35] Rowling, J. K. (2014). Harry potter and the order of the Phoenix. Bloomsbury Childrens Books.<br>\n [36] Rowling, J. K. (2014). Harry potter and the philosopher’s stone. Bloomsbury Childrens Books.<br>\n [37] Rowling, J. K. (2014). Harry potter and the prisoner of azkaban. Bloomsbury Childrens Books.<br>\n [38] Adams, D. (2007). The hitchhiker’s guide to the galaxy. Random House.<br>\n [39] Doyle, S. A. C. (2012). The hound of the baskervilles. Penguin Classics.<br>\n [40] Collins, S. (2008). The Hunger Games. Scholastic.<br>\n [41] Collins S. (2011). Catching Fire. Scholastic.<br>\n [42] Collins, S. (2011). Mockingjay. Scholastic.<br>\n [43] Brown, D. (2013). Inferno: a novel. 1st large print ed. New York, Random House Large Print.<br>\n [44] Bronte, C. (1992). Jane Eyre. Wordsworth Editions.<br>\n [45] Alcott, Louisa May, 1832-1888. (1953). Little women. Melbourne ; London ; Baltimore :Penguin Books,<br>\n [46] Golding, W. (2011). Lord of the flies. Faber & Faber.<br>\n [47] Tolkien, J. R. R. (1991). The lord of the rings. HarperCollins.<br>\n [48] Stone, I. (1984). Lust for life. 50th anniversary ed. New York, A Plume Book.<br>\n [49] Dahl, R. (1988). Matilda. Penguin.<br>\n [50] Moyes, J. (2012). Me Before You. Michael Joseph.<br>\n [51] Melville, Herman, 1819-1891. (2009). Moby-Dick. [Ashland, Or.] :Blackstone Audio,<br>\n [52] Eco, U. (2004). The name of the Rose. Vintage Classics.<br>\n [53] Meyer, S. (2008). New moon. 1st pbk. ed. New York, Little, Brown.<br>\n [54] Maugham, W. S. 1., & Crossley, S. (2010). Of human bondage. Unabridged. Old Saybrook, CT, Tantor Media.<br>\n [55] Dickens, C. (1992). Oliver Twist. Wordsworth Editions.<br>\n [56] Brown, D. (2017). Origin: a novel. First large print edition. [New York], Random House Large Print.<br>\n [57] Süskind, Patrick. (1986). Perfume : the story of a murderer. New York :A.A. Knopf,<br>\n [58] Austen, J. (2003). Pride and Prejudice (V. Jones, Ed.). Penguin Classics.<br>\n [59] Clavell, J. (1986). Shogun. Bantam Doubleday Dell Publishing Group.<br>\n [60] Eliot, G. (2012). Silas Marner. Penguin Classics.<br>\n [61] Gaarder, Jostein, 1952-. (1994). Sophie's world : a novel about the history of philosophy. New York :Farrar, Straus and Giroux,<br>\n [62] Burroughs, Edgar Rice, 1875-1950. (2010). Tarzan of the apes. Oxford [England] ; New York :Oxford University Press,<br>\n [63] Hardy, T. (2012). Tess of the D’Urbervilles. Penguin Classics.<br>\n [64] Doyle, A. C. (1950). The adventures of Sherlock Holmes. New York, The Heritage Press.<br>\n [65] Twain, M. (2011). The adventures of tom Sawyer. William Collins.<br>\n [66] Wharton, E. (1994). The age of innocence. Wordsworth Editions.<br>\n [67] Coelho, P. (1995). The alchemist. Thorsons.<br>\n [68] Boyne, John, 1971-. (2006). The boy in the striped pajamas : a fable. New York :David Fickling Books,<br>\n [69] London, J. (2008). The call of the wild. 
Puffin Classics.<br>\n [70] Salinger, J. D. (2001). Catcher in the Rye. Back Bay Books.<br>\n [71] Lewis, C. S. 1., & Baynes, P. (1994). Prince Caspian. New York, N.Y., HarperTrophy.<br>\n [72] Lewis, C. S.. (1978). The Chronicles of Narnia : The Lion, the Witch and the Wardrobe . New York: Harper Collins Publisher.<br>\n [73] Lewis, C. S.. (1980). The Chronicles of Narnia : The Voyage of the Dawn Treader . United States of America: Harper Collins Publisher.<br>\n [74] Brown, D. (2006). The Da Vinci Code. Corgi Books.<br>\n [75] Green, J. (2013). The Fault in Our Stars. Penguin Books.<br>\n [76] Hawkins, Paula. The Girl on the Train. New York :Riverhead Books, a member of Penguin Group (USA), 2015.<br>\n [77] Larsson, S. (2009). The girl with the dragon tattoo. Quercus Publishing.<br>\n [78] Lowry, L. (2014). The Giver. HarperCollins.<br>\n [79] Puzo, M. (2009). The Godfather. Arrow Books.<br>\n [80] Fitzgerald, F. S. (2019). The great Gatsby. Wordsworth Editions.<br>\n [81] Tolkien, J. R. R. (2012). The Hobbit. HarperCollins.<br>\n [82] Hawthorne, Nathaniel, 1804-1864. The House of the Seven Gables. Charlottesville, Va. :University of Virginia Library, 1996.<br>\n [83] Wells, H. G. 1. (1988). The invisible man. Aerie Books.<br>\n [84] Brown, D. (2009). The lost symbol: a novel. New York, Doubleday.<br>\n [85] Britannica, T. Editors of Encyclopaedia (2022, January 10). The Mysteries of Udolpho. Encyclopedia Britannica.<br>\n [86] Hinton, S. E. (2016). The Outsiders. Penguin Books.<br>\n [87] Wilde, O. (1992). The picture of Dorian gray. Wordsworth Editions.<br>\n [88] Follett, K. (1990). Pillars of the earth. New York, N.Y., Signet.<br>\n [89] Camus, A. (2020). The Plague. Penguin Classics.<br>\n [90] Hawthorne, N. (2003). Scarlet Letter. Penguin Classics.<br>\n [91] Townsend, Sue. (2003). The secret diary of Adrian Mole aged 13 3/4 . New York: Harper Collins.<br>\n [92] Burnett, F. H. (2017). The Secret Garden. Virago Press.<br>\n [93] Young, W. P. (2008). The Shack. Hodder & Stoughton.<br>\n [94] Ruiz Zafón, C., & Graves, L. (2005). The shadow of the wind. New York, Penguin Books.<br>\n [95] Tolkien, J. R. R. (1991). The Silmarillion. HarperCollins.<br>\n [96] James, H. (1991). The turn of the screw. Dover Publications.<br>\n [97] Hurston, Z. N. (2018). Their eyes were watching god. Virago Press.<br>\n [98] Achebe, C. (2006). Things Fall Apart. Penguin Classics.<br>\n [99] Lee, H. (2010). To kill A mockingbird. Arrow Books.<br>\n [100] Stevenson, R. L. (2016). Treasure Island. Puffin Classics.<br>\n [101] Meyer, S. (2007). Twilight. ATOM.<br>\n [102] Joyce, J. (2010). Ulysses. Wordsworth Editions.<br>\n [103] Tolstoy, L. (1993). War and peace (L. Maude & A. Maude, Trans.). Wordsworth Editions.<br>\n [104] Parkins, D., & Adams, R. (2014). Watership Down. Puffin Classics.<br>\n [105] Bronte, E. (2012). Wuthering Heights. Penguin Classics.<br>\n [106] Shannon, S. (2023). A day of fallen night. New York, Bloomsbury Publishing.<br>\n [107] Chung, N. (2023). A living remedy: a memoir. First edition. New York, NY, Ecco.<br>\n [108] Adebayo, A. (2023). A spell of good things: a novel. First edition. New York, Alfred A. Knopf.<br>\n [109] Koontz, D. R. 1. (2023). After death. First edition. Seattle, Thomas & Mercer.<br>\n [110] Patterson, J., & DuBois, B. (2022). Blowback. First edition. New York, Little, Brown and Company.<br>\n [111] Solomon, R. L. (2023). Business or pleasure. First edition. New York, Berkley Romance.<br>\n [112] Casati, C. (2023). Clytemnestra: a novel. 
Naperville, Illinois, Sourcebooks Landmark.<br>\n [113] Smith, T. R. (2023). Cold people: a novel. First Scribner hardcover edition. New York, Scribner.<br>\n [114] Frumkin, R. (2023). Confidence: a novel. First Simon & Schuster hardcover edition. New York, Simon & Schuster.<br>\n [115] Ross, R. (2023). Divine rivals: a novel. First edition. New York, Wednesday Books.<br>\n [116] Fawcett, H. (2023). Emily Wilde's encyclopaedia of faeries. First edition. New York, Del Rey.<br>\n [117] Rivero, M. (2023). Flores and Miss Paula: a novel. First edition. New York, Ecco, an imprint of HarperCollins Publishers.<br>\n [118] Henry, E. (2023). Happy place. New York, Berkley.<br>\n [119] Delury, J. (2023). Hedge: a novel. New York, Zibby Books.<br>\n [120] Bardugo, L. (2023). Hell bent. First edition. New York, NY, Flatiron Books.<br>\n [121] Center, K. (2023). Hello stranger. First edition. New York, St. Martin's Press.<br>\n [122] Hibbert, T. (2023). Highly suspicious and unfairly cute. First edition. New York, Joy Revolution, of Random House Children's Books.<br>\n [123] Burton, J. (2023). Housebroke. First edition. New York, Berkley Romance.<br>\n [124] Hendrix, G. (2023). How to sell a haunted house. New York, Berkley.<br>\n [125] Makkai, R. (2023). I have some questions for you. [New York], Viking.<br>\n [126] Klune, T. (2023). In the lives of puppets. First edition. New York, Tor Publishing Group.<br>\n [127] Boyle, L. (2023). In the Silence of Decay. [United States], Lisa Boyle.<br>\n [128] See, L. (2023). Lady Tan's circle of women: a novel. First Scribner hardcover edition. New York, Scribner.<br>\n [129] Hazelwood, A. (2023). Love, theoretically. New York, Berkley.<br>\n [130] George, J. (2023). Maame. First edition. New York, St. Martin's Press.<br>\n [131] Brown, A. (2023). Perfect little lives. Toronto, ON, Graydon House.<br>\n [132] Irby, S. (2023). Quietly hostile: essays. New York, Vintage Books, a division of Penguin Random House LLC.<br>\n [133] Harding, R. (2023). The drowning woman. First edition. New York, Grand Central Publishing.<br>\n [134] Sittenfeld, C. (2023). Romantic comedy: a novel. First edition. New York, Random House.<br>\n [135] Davis-Goff, S. (2023). Silent city. First U.S. Edition. New York, Flatiron Books.<br>\n [136] Moreno-Garcia, S. (2023). Silver nitrate. First edition. New York, Del Rey.<br>\n [137] Williams, D. (2023). Technically yours. First edition. New York, Berkley Romance.<br>\n [138] Page, S. (2023). The Book of Beginnings. HarperCollins.<br>\n [139] Ward, M. (2023). The Darkness Before Them. Orbit.<br>\n [140] Kantra, V. (2023). The fairytale life of Dorothy Gale. First Edition. New York, Berkley.<br>\n [141] Ferguson, L. (2023). The fake mate. First edition. New York, Berkley Romance.<br>\n [142] Whitten, H. (2023). The foxglove king. First Edition. New York, NY, Orbit.<br>\n [143] Hay, A. (2023). The housekeepers: a novel. Toronto, Ontario, Canada, Graydon House.<br>\n [144] Johansen, E. (2023). The Kingdom of Sweets: a novel of The Nutcracker. [New York], Dutton.<br>\n [145] Jackson, L. (2023). The last sinner. First Kensington hardcover edition. New York, NY, Kensington Publishing Corp.<br>\n [146] Woods, E. (2023). The lost bookshop. Paperback edition. London, One More Chapter.<br>\n [147] Sager, R. (2023). The only one left: a novel. New York, Dutton.<br>\n [148] Ryan, R. (2023). The Paris housekeeper. Toronto, Ontario, Canada, Love Inspired.<br>\n [149] Hashem, S. (2023). The Jasad heir. First edition. 
New York, NY, Orbit.<br>\n [150] Leigh, J. (2023). The Silver Ladies Do Lunch, [United States], Boldwood Books.<br>\n [151] Davis, F. (2023). The spectacular: a novel. New York, Dutton.<br>\n [152] Harvey, K. W. (2023). The summer of songbirds: a novel. First Gallery Books hardcover edition. New York, Gallery Books.<br>\n [153] Lauren, C. (2023). The True Love Experiment. First Gallery Books hardcover edition. New York, Gallery Books.<br>\n [154] Lemmie, A. (2023). The wildest sun: a novel. [New York], Dutton.<br>\n [155] Hatzopoulou, K. (2023). Threads that bind. New York, Razorbill.<br>\n [156] Harkaway, N. (2023). Titanium noir: a novel. First edition. New York, Alfred A. Knopf.<br>\n [157] Adams, K. J. (2023). Tonight, I burn. First edition. New York, NY, Orbit.<br>\n [158] Parks, A. (2023). Two dead wives. Toronto, Mira.<br>\n [159] Bailey, T. (2023). Unfortunately yours: a novel. First edition. New York, NY, Avon, an imprint of HarperCollinsPublishers.<br>\n [160] McGinnis, M. W. (2023). USS Lincoln: Mercy Kill. Avenstar Productions<br>\n [161] Sebastian, C. (2023). We could be so good: a novel. First edition. New York, NY, Avon, an imprint of HarperCollinsPublishers.<br>\n [162] LeCheminant, D. (2023). What Dreams May Come. Covenant Communications.<br>\n [163] Marshall, K. A. (2023). What lies in the woods. First edition. New York, Flatiron Books.<br>\n [164] Kuang, R. F. (2023). Yellowface: a novel. First edition. New York, NY, William Morrow, an imprint of HarperCollinsPublishers.<br>\n [165] Bayron, K. (2023). You're not supposed to die tonight. New York, Bloomsbury YA.<br>\n</details>" ]
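The Multiple-Choice-Question-Answering protocol described in the BookTection card can be approximated by a likelihood comparison: score each candidate passage under a causal language model and pick the most probable one as the presumed real excerpt. The sketch below illustrates the general idea only; it is not the repository's actual evaluation script, and the model name is a stand-in.

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

MODEL_NAME = "gpt2"  # stand-in; the card targets LLaMA-2, Mistral, Mixtral, etc.
tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
model = AutoModelForCausalLM.from_pretrained(MODEL_NAME).eval()

def avg_logprob(text: str) -> float:
    """Average per-token log-likelihood of `text` under the model."""
    ids = tokenizer(text, return_tensors="pt").input_ids
    with torch.no_grad():
        out = model(ids, labels=ids)  # out.loss is the mean negative log-likelihood
    return -out.loss.item()

def pick_real_excerpt(candidates: list[str]) -> int:
    """Return the index of the candidate the model finds most likely.

    Intuition: a model that saw the book during pretraining should prefer
    the verbatim excerpt over its three paraphrases.
    """
    scores = [avg_logprob(c) for c in candidates]
    return max(range(len(scores)), key=scores.__getitem__)
```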
334b96757f775224a8987bd1e7d8a55f06988ae5
# Dataset Card for Evaluation run of Weyaxi/very-test <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [Weyaxi/very-test](https://huggingface.co/Weyaxi/very-test) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_Weyaxi__very-test", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-02-02T11:08:50.720167](https://huggingface.co/datasets/open-llm-leaderboard/details_Weyaxi__very-test/blob/main/results_2024-02-02T11-08-50.720167.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.621240021862014, "acc_stderr": 0.032135488932384076, "acc_norm": 0.6323055902860398, "acc_norm_stderr": 0.03292467379464547, "mc1": 0.2876376988984088, "mc1_stderr": 0.01584631510139481, "mc2": 0.44279159783454264, "mc2_stderr": 0.015167006349063972 }, "harness|arc:challenge|25": { "acc": 0.5998293515358362, "acc_stderr": 0.014317197787809172, "acc_norm": 0.6390784982935154, "acc_norm_stderr": 0.014034761386175452 }, "harness|hellaswag|10": { "acc": 0.6256721768571998, "acc_stderr": 0.004829598101635787, "acc_norm": 0.8170683130850428, "acc_norm_stderr": 0.0038582038518199338 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.5703703703703704, "acc_stderr": 0.04276349494376599, "acc_norm": 0.5703703703703704, "acc_norm_stderr": 0.04276349494376599 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6973684210526315, "acc_stderr": 0.03738520676119668, "acc_norm": 0.6973684210526315, "acc_norm_stderr": 0.03738520676119668 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.66, "acc_stderr": 0.04760952285695237, "acc_norm": 0.66, "acc_norm_stderr": 0.04760952285695237 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6679245283018868, "acc_stderr": 0.02898545565233439, "acc_norm": 0.6679245283018868, "acc_norm_stderr": 0.02898545565233439 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7361111111111112, "acc_stderr": 0.03685651095897532, "acc_norm": 0.7361111111111112, "acc_norm_stderr": 0.03685651095897532 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.43, "acc_stderr": 0.04975698519562428, "acc_norm": 0.43, "acc_norm_stderr": 0.04975698519562428 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.54, "acc_stderr": 0.05009082659620333, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620333 }, "harness|hendrycksTest-college_mathematics|5": {
"acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6647398843930635, "acc_stderr": 0.03599586301247077, "acc_norm": 0.6647398843930635, "acc_norm_stderr": 0.03599586301247077 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.39215686274509803, "acc_stderr": 0.04858083574266344, "acc_norm": 0.39215686274509803, "acc_norm_stderr": 0.04858083574266344 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.76, "acc_stderr": 0.042923469599092816, "acc_norm": 0.76, "acc_norm_stderr": 0.042923469599092816 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5446808510638298, "acc_stderr": 0.032555253593403555, "acc_norm": 0.5446808510638298, "acc_norm_stderr": 0.032555253593403555 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.49122807017543857, "acc_stderr": 0.04702880432049615, "acc_norm": 0.49122807017543857, "acc_norm_stderr": 0.04702880432049615 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5862068965517241, "acc_stderr": 0.04104269211806232, "acc_norm": 0.5862068965517241, "acc_norm_stderr": 0.04104269211806232 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.4126984126984127, "acc_stderr": 0.02535574126305527, "acc_norm": 0.4126984126984127, "acc_norm_stderr": 0.02535574126305527 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.48412698412698413, "acc_stderr": 0.04469881854072606, "acc_norm": 0.48412698412698413, "acc_norm_stderr": 0.04469881854072606 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.26, "acc_stderr": 0.044084400227680794, "acc_norm": 0.26, "acc_norm_stderr": 0.044084400227680794 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7709677419354839, "acc_stderr": 0.023904914311782655, "acc_norm": 0.7709677419354839, "acc_norm_stderr": 0.023904914311782655 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5172413793103449, "acc_stderr": 0.035158955511656986, "acc_norm": 0.5172413793103449, "acc_norm_stderr": 0.035158955511656986 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.64, "acc_stderr": 0.04824181513244218, "acc_norm": 0.64, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7757575757575758, "acc_stderr": 0.032568666616811015, "acc_norm": 0.7757575757575758, "acc_norm_stderr": 0.032568666616811015 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.797979797979798, "acc_stderr": 0.02860620428922987, "acc_norm": 0.797979797979798, "acc_norm_stderr": 0.02860620428922987 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8808290155440415, "acc_stderr": 0.023381935348121417, "acc_norm": 0.8808290155440415, "acc_norm_stderr": 0.023381935348121417 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6205128205128205, "acc_stderr": 0.024603626924097424, "acc_norm": 0.6205128205128205, "acc_norm_stderr": 0.024603626924097424 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3333333333333333, "acc_stderr": 0.028742040903948492, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.028742040903948492 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6302521008403361, "acc_stderr": 0.031357095996135904, "acc_norm": 0.6302521008403361, "acc_norm_stderr": 0.031357095996135904 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.31788079470198677, "acc_stderr": 0.038020397601079024, "acc_norm": 0.31788079470198677, 
"acc_norm_stderr": 0.038020397601079024 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8440366972477065, "acc_stderr": 0.015555802713590172, "acc_norm": 0.8440366972477065, "acc_norm_stderr": 0.015555802713590172 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.47685185185185186, "acc_stderr": 0.03406315360711507, "acc_norm": 0.47685185185185186, "acc_norm_stderr": 0.03406315360711507 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8088235294117647, "acc_stderr": 0.027599174300640766, "acc_norm": 0.8088235294117647, "acc_norm_stderr": 0.027599174300640766 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7805907172995781, "acc_stderr": 0.026939106581553945, "acc_norm": 0.7805907172995781, "acc_norm_stderr": 0.026939106581553945 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.7174887892376681, "acc_stderr": 0.03021683101150878, "acc_norm": 0.7174887892376681, "acc_norm_stderr": 0.03021683101150878 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7633587786259542, "acc_stderr": 0.03727673575596914, "acc_norm": 0.7633587786259542, "acc_norm_stderr": 0.03727673575596914 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7933884297520661, "acc_stderr": 0.03695980128098824, "acc_norm": 0.7933884297520661, "acc_norm_stderr": 0.03695980128098824 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.75, "acc_stderr": 0.04186091791394607, "acc_norm": 0.75, "acc_norm_stderr": 0.04186091791394607 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7484662576687117, "acc_stderr": 0.03408997886857529, "acc_norm": 0.7484662576687117, "acc_norm_stderr": 0.03408997886857529 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.3482142857142857, "acc_stderr": 0.04521829902833585, "acc_norm": 0.3482142857142857, "acc_norm_stderr": 0.04521829902833585 }, "harness|hendrycksTest-management|5": { "acc": 0.7669902912621359, "acc_stderr": 0.04185832598928315, "acc_norm": 0.7669902912621359, "acc_norm_stderr": 0.04185832598928315 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8931623931623932, "acc_stderr": 0.020237149008990932, "acc_norm": 0.8931623931623932, "acc_norm_stderr": 0.020237149008990932 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.77, "acc_stderr": 0.04229525846816505, "acc_norm": 0.77, "acc_norm_stderr": 0.04229525846816505 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8212005108556832, "acc_stderr": 0.01370264371536898, "acc_norm": 0.8212005108556832, "acc_norm_stderr": 0.01370264371536898 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7312138728323699, "acc_stderr": 0.023868003262500104, "acc_norm": 0.7312138728323699, "acc_norm_stderr": 0.023868003262500104 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.30726256983240224, "acc_stderr": 0.015430158846469607, "acc_norm": 0.30726256983240224, "acc_norm_stderr": 0.015430158846469607 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7091503267973857, "acc_stderr": 0.02600480036395213, "acc_norm": 0.7091503267973857, "acc_norm_stderr": 0.02600480036395213 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7041800643086816, "acc_stderr": 0.02592237178881877, "acc_norm": 0.7041800643086816, "acc_norm_stderr": 0.02592237178881877 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7253086419753086, "acc_stderr": 0.024836057868294674, "acc_norm": 0.7253086419753086, "acc_norm_stderr": 0.024836057868294674 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.4397163120567376, "acc_stderr": 
0.029609912075594113, "acc_norm": 0.4397163120567376, "acc_norm_stderr": 0.029609912075594113 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4634941329856584, "acc_stderr": 0.012736153390214963, "acc_norm": 0.4634941329856584, "acc_norm_stderr": 0.012736153390214963 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6691176470588235, "acc_stderr": 0.02858270975389844, "acc_norm": 0.6691176470588235, "acc_norm_stderr": 0.02858270975389844 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6535947712418301, "acc_stderr": 0.019249785691717206, "acc_norm": 0.6535947712418301, "acc_norm_stderr": 0.019249785691717206 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6454545454545455, "acc_stderr": 0.045820048415054174, "acc_norm": 0.6454545454545455, "acc_norm_stderr": 0.045820048415054174 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7183673469387755, "acc_stderr": 0.02879518557429129, "acc_norm": 0.7183673469387755, "acc_norm_stderr": 0.02879518557429129 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8109452736318408, "acc_stderr": 0.02768691358801301, "acc_norm": 0.8109452736318408, "acc_norm_stderr": 0.02768691358801301 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.85, "acc_stderr": 0.035887028128263686, "acc_norm": 0.85, "acc_norm_stderr": 0.035887028128263686 }, "harness|hendrycksTest-virology|5": { "acc": 0.5301204819277109, "acc_stderr": 0.03885425420866766, "acc_norm": 0.5301204819277109, "acc_norm_stderr": 0.03885425420866766 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8070175438596491, "acc_stderr": 0.030267457554898458, "acc_norm": 0.8070175438596491, "acc_norm_stderr": 0.030267457554898458 }, "harness|truthfulqa:mc|0": { "mc1": 0.2876376988984088, "mc1_stderr": 0.01584631510139481, "mc2": 0.44279159783454264, "mc2_stderr": 0.015167006349063972 }, "harness|winogrande|5": { "acc": 0.7868981846882399, "acc_stderr": 0.011508957690722752 }, "harness|gsm8k|5": { "acc": 0.03335860500379075, "acc_stderr": 0.004946282649173775 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_Weyaxi__very-test
[ "region:us" ]
2024-02-02T11:11:10+00:00
{"pretty_name": "Evaluation run of Weyaxi/very-test", "dataset_summary": "Dataset automatically created during the evaluation run of model [Weyaxi/very-test](https://huggingface.co/Weyaxi/very-test) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Weyaxi__very-test\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-02T11:08:50.720167](https://huggingface.co/datasets/open-llm-leaderboard/details_Weyaxi__very-test/blob/main/results_2024-02-02T11-08-50.720167.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.621240021862014,\n \"acc_stderr\": 0.032135488932384076,\n \"acc_norm\": 0.6323055902860398,\n \"acc_norm_stderr\": 0.03292467379464547,\n \"mc1\": 0.2876376988984088,\n \"mc1_stderr\": 0.01584631510139481,\n \"mc2\": 0.44279159783454264,\n \"mc2_stderr\": 0.015167006349063972\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5998293515358362,\n \"acc_stderr\": 0.014317197787809172,\n \"acc_norm\": 0.6390784982935154,\n \"acc_norm_stderr\": 0.014034761386175452\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6256721768571998,\n \"acc_stderr\": 0.004829598101635787,\n \"acc_norm\": 0.8170683130850428,\n \"acc_norm_stderr\": 0.0038582038518199338\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.046882617226215034,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.046882617226215034\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5703703703703704,\n \"acc_stderr\": 0.04276349494376599,\n \"acc_norm\": 0.5703703703703704,\n \"acc_norm_stderr\": 0.04276349494376599\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6973684210526315,\n \"acc_stderr\": 0.03738520676119668,\n \"acc_norm\": 0.6973684210526315,\n \"acc_norm_stderr\": 0.03738520676119668\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.66,\n \"acc_stderr\": 0.04760952285695237,\n \"acc_norm\": 0.66,\n \"acc_norm_stderr\": 0.04760952285695237\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6679245283018868,\n \"acc_stderr\": 0.02898545565233439,\n \"acc_norm\": 0.6679245283018868,\n \"acc_norm_stderr\": 0.02898545565233439\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7361111111111112,\n \"acc_stderr\": 0.03685651095897532,\n \"acc_norm\": 0.7361111111111112,\n \"acc_norm_stderr\": 0.03685651095897532\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.43,\n \"acc_stderr\": 0.04975698519562428,\n \"acc_norm\": 0.43,\n 
\"acc_norm_stderr\": 0.04975698519562428\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.54,\n \"acc_stderr\": 0.05009082659620333,\n \"acc_norm\": 0.54,\n \"acc_norm_stderr\": 0.05009082659620333\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6647398843930635,\n \"acc_stderr\": 0.03599586301247077,\n \"acc_norm\": 0.6647398843930635,\n \"acc_norm_stderr\": 0.03599586301247077\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.39215686274509803,\n \"acc_stderr\": 0.04858083574266344,\n \"acc_norm\": 0.39215686274509803,\n \"acc_norm_stderr\": 0.04858083574266344\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.76,\n \"acc_stderr\": 0.042923469599092816,\n \"acc_norm\": 0.76,\n \"acc_norm_stderr\": 0.042923469599092816\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5446808510638298,\n \"acc_stderr\": 0.032555253593403555,\n \"acc_norm\": 0.5446808510638298,\n \"acc_norm_stderr\": 0.032555253593403555\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.49122807017543857,\n \"acc_stderr\": 0.04702880432049615,\n \"acc_norm\": 0.49122807017543857,\n \"acc_norm_stderr\": 0.04702880432049615\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5862068965517241,\n \"acc_stderr\": 0.04104269211806232,\n \"acc_norm\": 0.5862068965517241,\n \"acc_norm_stderr\": 0.04104269211806232\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.4126984126984127,\n \"acc_stderr\": 0.02535574126305527,\n \"acc_norm\": 0.4126984126984127,\n \"acc_norm_stderr\": 0.02535574126305527\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.48412698412698413,\n \"acc_stderr\": 0.04469881854072606,\n \"acc_norm\": 0.48412698412698413,\n \"acc_norm_stderr\": 0.04469881854072606\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.26,\n \"acc_stderr\": 0.044084400227680794,\n \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.044084400227680794\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7709677419354839,\n \"acc_stderr\": 0.023904914311782655,\n \"acc_norm\": 0.7709677419354839,\n \"acc_norm_stderr\": 0.023904914311782655\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5172413793103449,\n \"acc_stderr\": 0.035158955511656986,\n \"acc_norm\": 0.5172413793103449,\n \"acc_norm_stderr\": 0.035158955511656986\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.64,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.64,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7757575757575758,\n \"acc_stderr\": 0.032568666616811015,\n \"acc_norm\": 0.7757575757575758,\n \"acc_norm_stderr\": 0.032568666616811015\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.797979797979798,\n \"acc_stderr\": 0.02860620428922987,\n \"acc_norm\": 0.797979797979798,\n \"acc_norm_stderr\": 0.02860620428922987\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8808290155440415,\n \"acc_stderr\": 0.023381935348121417,\n \"acc_norm\": 0.8808290155440415,\n \"acc_norm_stderr\": 0.023381935348121417\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6205128205128205,\n 
\"acc_stderr\": 0.024603626924097424,\n \"acc_norm\": 0.6205128205128205,\n \"acc_norm_stderr\": 0.024603626924097424\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3333333333333333,\n \"acc_stderr\": 0.028742040903948492,\n \"acc_norm\": 0.3333333333333333,\n \"acc_norm_stderr\": 0.028742040903948492\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6302521008403361,\n \"acc_stderr\": 0.031357095996135904,\n \"acc_norm\": 0.6302521008403361,\n \"acc_norm_stderr\": 0.031357095996135904\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.31788079470198677,\n \"acc_stderr\": 0.038020397601079024,\n \"acc_norm\": 0.31788079470198677,\n \"acc_norm_stderr\": 0.038020397601079024\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8440366972477065,\n \"acc_stderr\": 0.015555802713590172,\n \"acc_norm\": 0.8440366972477065,\n \"acc_norm_stderr\": 0.015555802713590172\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.47685185185185186,\n \"acc_stderr\": 0.03406315360711507,\n \"acc_norm\": 0.47685185185185186,\n \"acc_norm_stderr\": 0.03406315360711507\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8088235294117647,\n \"acc_stderr\": 0.027599174300640766,\n \"acc_norm\": 0.8088235294117647,\n \"acc_norm_stderr\": 0.027599174300640766\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7805907172995781,\n \"acc_stderr\": 0.026939106581553945,\n \"acc_norm\": 0.7805907172995781,\n \"acc_norm_stderr\": 0.026939106581553945\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.7174887892376681,\n \"acc_stderr\": 0.03021683101150878,\n \"acc_norm\": 0.7174887892376681,\n \"acc_norm_stderr\": 0.03021683101150878\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7633587786259542,\n \"acc_stderr\": 0.03727673575596914,\n \"acc_norm\": 0.7633587786259542,\n \"acc_norm_stderr\": 0.03727673575596914\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7933884297520661,\n \"acc_stderr\": 0.03695980128098824,\n \"acc_norm\": 0.7933884297520661,\n \"acc_norm_stderr\": 0.03695980128098824\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04186091791394607,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04186091791394607\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7484662576687117,\n \"acc_stderr\": 0.03408997886857529,\n \"acc_norm\": 0.7484662576687117,\n \"acc_norm_stderr\": 0.03408997886857529\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.3482142857142857,\n \"acc_stderr\": 0.04521829902833585,\n \"acc_norm\": 0.3482142857142857,\n \"acc_norm_stderr\": 0.04521829902833585\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7669902912621359,\n \"acc_stderr\": 0.04185832598928315,\n \"acc_norm\": 0.7669902912621359,\n \"acc_norm_stderr\": 0.04185832598928315\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8931623931623932,\n \"acc_stderr\": 0.020237149008990932,\n \"acc_norm\": 0.8931623931623932,\n \"acc_norm_stderr\": 0.020237149008990932\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.77,\n \"acc_stderr\": 0.04229525846816505,\n \"acc_norm\": 0.77,\n \"acc_norm_stderr\": 0.04229525846816505\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8212005108556832,\n \"acc_stderr\": 0.01370264371536898,\n \"acc_norm\": 0.8212005108556832,\n \"acc_norm_stderr\": 
0.01370264371536898\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7312138728323699,\n \"acc_stderr\": 0.023868003262500104,\n \"acc_norm\": 0.7312138728323699,\n \"acc_norm_stderr\": 0.023868003262500104\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.30726256983240224,\n \"acc_stderr\": 0.015430158846469607,\n \"acc_norm\": 0.30726256983240224,\n \"acc_norm_stderr\": 0.015430158846469607\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7091503267973857,\n \"acc_stderr\": 0.02600480036395213,\n \"acc_norm\": 0.7091503267973857,\n \"acc_norm_stderr\": 0.02600480036395213\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7041800643086816,\n \"acc_stderr\": 0.02592237178881877,\n \"acc_norm\": 0.7041800643086816,\n \"acc_norm_stderr\": 0.02592237178881877\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7253086419753086,\n \"acc_stderr\": 0.024836057868294674,\n \"acc_norm\": 0.7253086419753086,\n \"acc_norm_stderr\": 0.024836057868294674\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4397163120567376,\n \"acc_stderr\": 0.029609912075594113,\n \"acc_norm\": 0.4397163120567376,\n \"acc_norm_stderr\": 0.029609912075594113\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4634941329856584,\n \"acc_stderr\": 0.012736153390214963,\n \"acc_norm\": 0.4634941329856584,\n \"acc_norm_stderr\": 0.012736153390214963\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6691176470588235,\n \"acc_stderr\": 0.02858270975389844,\n \"acc_norm\": 0.6691176470588235,\n \"acc_norm_stderr\": 0.02858270975389844\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6535947712418301,\n \"acc_stderr\": 0.019249785691717206,\n \"acc_norm\": 0.6535947712418301,\n \"acc_norm_stderr\": 0.019249785691717206\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6454545454545455,\n \"acc_stderr\": 0.045820048415054174,\n \"acc_norm\": 0.6454545454545455,\n \"acc_norm_stderr\": 0.045820048415054174\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7183673469387755,\n \"acc_stderr\": 0.02879518557429129,\n \"acc_norm\": 0.7183673469387755,\n \"acc_norm_stderr\": 0.02879518557429129\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8109452736318408,\n \"acc_stderr\": 0.02768691358801301,\n \"acc_norm\": 0.8109452736318408,\n \"acc_norm_stderr\": 0.02768691358801301\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.85,\n \"acc_stderr\": 0.035887028128263686,\n \"acc_norm\": 0.85,\n \"acc_norm_stderr\": 0.035887028128263686\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5301204819277109,\n \"acc_stderr\": 0.03885425420866766,\n \"acc_norm\": 0.5301204819277109,\n \"acc_norm_stderr\": 0.03885425420866766\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8070175438596491,\n \"acc_stderr\": 0.030267457554898458,\n \"acc_norm\": 0.8070175438596491,\n \"acc_norm_stderr\": 0.030267457554898458\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.2876376988984088,\n \"mc1_stderr\": 0.01584631510139481,\n \"mc2\": 0.44279159783454264,\n \"mc2_stderr\": 0.015167006349063972\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7868981846882399,\n \"acc_stderr\": 0.011508957690722752\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.03335860500379075,\n \"acc_stderr\": 0.004946282649173775\n }\n}\n```", "repo_url": "https://huggingface.co/Weyaxi/very-test", "leaderboard_url": 
"https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_02T11_08_50.720167", "path": ["**/details_harness|arc:challenge|25_2024-02-02T11-08-50.720167.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-02T11-08-50.720167.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_02T11_08_50.720167", "path": ["**/details_harness|gsm8k|5_2024-02-02T11-08-50.720167.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-02T11-08-50.720167.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_02T11_08_50.720167", "path": ["**/details_harness|hellaswag|10_2024-02-02T11-08-50.720167.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-02T11-08-50.720167.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_02T11_08_50.720167", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T11-08-50.720167.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T11-08-50.720167.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T11-08-50.720167.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-02T11-08-50.720167.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T11-08-50.720167.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T11-08-50.720167.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_02T11_08_50.720167", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T11-08-50.720167.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T11-08-50.720167.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_02T11_08_50.720167", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T11-08-50.720167.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T11-08-50.720167.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_02T11_08_50.720167", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T11-08-50.720167.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T11-08-50.720167.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_02T11_08_50.720167", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T11-08-50.720167.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T11-08-50.720167.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_02T11_08_50.720167", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T11-08-50.720167.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T11-08-50.720167.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_02T11_08_50.720167", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T11-08-50.720167.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T11-08-50.720167.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_02T11_08_50.720167", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T11-08-50.720167.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T11-08-50.720167.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_02_02T11_08_50.720167", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T11-08-50.720167.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T11-08-50.720167.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_02T11_08_50.720167", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T11-08-50.720167.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T11-08-50.720167.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_02T11_08_50.720167", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T11-08-50.720167.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T11-08-50.720167.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_02T11_08_50.720167", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T11-08-50.720167.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T11-08-50.720167.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_02T11_08_50.720167", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T11-08-50.720167.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T11-08-50.720167.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_02T11_08_50.720167", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T11-08-50.720167.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T11-08-50.720167.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_02T11_08_50.720167", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T11-08-50.720167.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T11-08-50.720167.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_02T11_08_50.720167", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T11-08-50.720167.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T11-08-50.720167.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_02T11_08_50.720167", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T11-08-50.720167.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T11-08-50.720167.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_02T11_08_50.720167", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T11-08-50.720167.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T11-08-50.720167.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_02T11_08_50.720167", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T11-08-50.720167.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T11-08-50.720167.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_02T11_08_50.720167", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T11-08-50.720167.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T11-08-50.720167.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_02T11_08_50.720167", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T11-08-50.720167.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T11-08-50.720167.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_02T11_08_50.720167", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T11-08-50.720167.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T11-08-50.720167.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_02T11_08_50.720167", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T11-08-50.720167.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T11-08-50.720167.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_02T11_08_50.720167", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T11-08-50.720167.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T11-08-50.720167.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_02T11_08_50.720167", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T11-08-50.720167.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T11-08-50.720167.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_02T11_08_50.720167", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T11-08-50.720167.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T11-08-50.720167.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_02T11_08_50.720167", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T11-08-50.720167.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T11-08-50.720167.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_02T11_08_50.720167", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T11-08-50.720167.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T11-08-50.720167.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_02T11_08_50.720167", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T11-08-50.720167.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T11-08-50.720167.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_02T11_08_50.720167", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T11-08-50.720167.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T11-08-50.720167.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_02T11_08_50.720167", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T11-08-50.720167.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T11-08-50.720167.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_02T11_08_50.720167", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T11-08-50.720167.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T11-08-50.720167.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_02T11_08_50.720167", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T11-08-50.720167.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T11-08-50.720167.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_02T11_08_50.720167", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T11-08-50.720167.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T11-08-50.720167.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_02T11_08_50.720167", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T11-08-50.720167.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T11-08-50.720167.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_02T11_08_50.720167", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T11-08-50.720167.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T11-08-50.720167.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_02T11_08_50.720167", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T11-08-50.720167.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T11-08-50.720167.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_02T11_08_50.720167", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T11-08-50.720167.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T11-08-50.720167.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_02_02T11_08_50.720167", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T11-08-50.720167.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T11-08-50.720167.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_02T11_08_50.720167", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T11-08-50.720167.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T11-08-50.720167.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_02T11_08_50.720167", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T11-08-50.720167.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T11-08-50.720167.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_02T11_08_50.720167", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T11-08-50.720167.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T11-08-50.720167.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_02T11_08_50.720167", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T11-08-50.720167.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T11-08-50.720167.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_02T11_08_50.720167", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T11-08-50.720167.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T11-08-50.720167.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_02T11_08_50.720167", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T11-08-50.720167.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T11-08-50.720167.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_02T11_08_50.720167", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T11-08-50.720167.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T11-08-50.720167.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_02T11_08_50.720167", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T11-08-50.720167.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T11-08-50.720167.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_02T11_08_50.720167", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T11-08-50.720167.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T11-08-50.720167.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_02T11_08_50.720167", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T11-08-50.720167.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T11-08-50.720167.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_02T11_08_50.720167", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T11-08-50.720167.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T11-08-50.720167.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_02T11_08_50.720167", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T11-08-50.720167.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T11-08-50.720167.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_02T11_08_50.720167", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T11-08-50.720167.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T11-08-50.720167.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_02T11_08_50.720167", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T11-08-50.720167.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T11-08-50.720167.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_02T11_08_50.720167", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T11-08-50.720167.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T11-08-50.720167.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_02T11_08_50.720167", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T11-08-50.720167.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T11-08-50.720167.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_02T11_08_50.720167", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T11-08-50.720167.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T11-08-50.720167.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_02T11_08_50.720167", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T11-08-50.720167.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T11-08-50.720167.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_02T11_08_50.720167", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T11-08-50.720167.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T11-08-50.720167.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_02T11_08_50.720167", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T11-08-50.720167.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T11-08-50.720167.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_02T11_08_50.720167", "path": ["**/details_harness|winogrande|5_2024-02-02T11-08-50.720167.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-02-02T11-08-50.720167.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_02_02T11_08_50.720167", "path": ["results_2024-02-02T11-08-50.720167.parquet"]}, {"split": "latest", "path": 
["results_2024-02-02T11-08-50.720167.parquet"]}]}]}
2024-02-02T11:11:33+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of Weyaxi/very-test Dataset automatically created during the evaluation run of model Weyaxi/very-test on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-02-02T11:08:50.720167 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of Weyaxi/very-test\n\n\n\nDataset automatically created during the evaluation run of model Weyaxi/very-test on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T11:08:50.720167(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of Weyaxi/very-test\n\n\n\nDataset automatically created during the evaluation run of model Weyaxi/very-test on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T11:08:50.720167(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
28678e8754ebe301a1a5cf1c24466e1b34faa7fb
# Dataset Card for "infographic-sections" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
McSpicyWithMilo/infographic-sections-0.2split
[ "region:us" ]
2024-02-02T11:15:21+00:00
{"dataset_info": {"features": [{"name": "instruction_type", "dtype": "string"}, {"name": "instruction", "dtype": "string"}, {"name": "infographic_section", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 34539, "num_examples": 320}, {"name": "test", "num_bytes": 8415, "num_examples": 80}], "download_size": 20216, "dataset_size": 42954}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}]}
2024-02-02T11:15:33+00:00
[]
[]
TAGS #region-us
# Dataset Card for "infographic-sections" More Information needed
[ "# Dataset Card for \"infographic-sections\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"infographic-sections\"\n\nMore Information needed" ]
6486abfa34a4846bf1b617b702933418173161ca
# Dataset Card for Dataset Name <!-- The dataset only has Phonk music. --> This dataset card aims to be a base template for new datasets. It has been generated using [this raw template](https://github.com/huggingface/huggingface_hub/blob/main/src/huggingface_hub/templates/datasetcard_template.md?plain=1). ## Dataset Details ### Dataset Description <!-- 1. track_name: Think of this as the song’s name, like “Happy” or “Shape of You.” 2. artist(s)_name: It’s the name of the people who made the song, like Ed Sheeran or Taylor Swift. 3. danceability_%: This shows how much your feet will want to dance to the song. It’s like a dancing score. 4. energy_%: It’s like the song’s battery level. High energy means the song is super active. 5. bpm: Think of this as the heartbeat of the song. It’s how fast or slow it goes. 6. key: Just like a door key, this tells us what “mood” the song has. It could be happy (major) or sad (minor). 7. loudness: Just how loud the song is. 8. mode: It’s like knowing if the song is smiling (major) or serious (minor). 9. speechiness_%: This is about talking in the song. High ‘speechiness’ means there are more spoken words. 10. acousticness_%: Imagine this as the “acoustic guitar” level. High acousticness means the song sounds natural. 11. instrumentalness_%: This shows how much of the song is just music without singing. High instrumentalness means it’s more like a musical jam. 12. liveness_%: It’s like having a live concert in your headphones. High liveness means you can feel the energy of a live performance. 13. valence_%: This tells us if the song feels happy or not. The higher, the happier. 14. duration_ms: how long the song is --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Got the data from Spotify. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Your choice. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- It has duplicates, so you might want to clean the data up. After I am done with cleanup, I might also upload that here. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them.
--> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
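The card above notes that the data still contains duplicates. A minimal clean-up sketch, assuming the repo exposes a loadable default `train` split (the split name is an assumption, as the card does not state it):

```python
from datasets import load_dataset

# "train" is an assumed split name; the card does not document the splits.
ds = load_dataset("TakeWhatsYours/Phonk_music", split="train")
df = ds.to_pandas()

# Drop exact duplicate rows, as suggested by the card's note on duplicates.
deduped = df.drop_duplicates()
print(f"{len(df)} rows -> {len(deduped)} rows after removing exact duplicates")
```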
TakeWhatsYours/Phonk_music
[ "music", "region:us" ]
2024-02-02T11:16:30+00:00
{"tags": ["music"]}
2024-02-03T07:28:21+00:00
[]
[]
TAGS #music #region-us
# Dataset Card for Dataset Name This dataset card aims to be a base template for new datasets. It has been generated using this raw template. ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Dataset Name\n\n\n\nThis dataset card aims to be a base template for new datasets. It has been generated using this raw template.", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#music #region-us \n", "# Dataset Card for Dataset Name\n\n\n\nThis dataset card aims to be a base template for new datasets. It has been generated using this raw template.", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
2fefadd25cff5c20c5724c9b3c357e0d5b186e83
# Dataset Card for [OpenSubtitles-TW-Corpus] ## Table of Contents - [Table of Contents](#table-of-contents) - [Dataset Description](#dataset-description) - [Dataset Summary](#dataset-summary) - [Supported Tasks and Leaderboards](#supported-tasks-and-leaderboards) - [Languages](#languages) - [Dataset Structure](#dataset-structure) - [Data Instances](#data-instances) - [Data Fields](#data-fields) - [Data Splits](#data-splits) - [Dataset Creation](#dataset-creation) - [Curation Rationale](#curation-rationale) - [Source Data](#source-data) - [Annotations](#annotations) - [Personal and Sensitive Information](#personal-and-sensitive-information) - [Considerations for Using the Data](#considerations-for-using-the-data) - [Social Impact of Dataset](#social-impact-of-dataset) - [Discussion of Biases](#discussion-of-biases) - [Other Known Limitations](#other-known-limitations) - [Additional Information](#additional-information) - [Dataset Curators](#dataset-curators) - [Licensing Information](#licensing-information) - [Citation Information](#citation-information) - [Contributions](#contributions) ## Dataset Description - **Homepage:** - **Repository:** - **Paper:** - **Leaderboard:** - **Point of Contact:** [Heng-Shiou Sheu](mailto:[email protected]) ### Dataset Summary OpenSubtitles-TW-Corpus is a multilingual machine-translation benchmark dataset, derived from the user-contributed translations collected by [OpenSubtitles](https://opus.nlpl.eu/OpenSubtitles/corpus/version/OpenSubtitles) and made available by [OPUS](https://opus.nlpl.eu/). The dataset includes test and development data sorted by language pair. It includes test sets for hundreds of language pairs and is continuously updated. Please check the version tag to cite the exact version you are using. This is a slightly cleaner version of the subtitle collection, with improved sentence alignment and better language checking. ### Supported Tasks and Leaderboards ### Languages The dataset covers hundreds of languages and language pairs and is organized by ISO-639-1 language codes. The current release covers the following languages: Traditional Chinese, English, Japanese, Korean, Indonesian, Vietnamese, and Thai. ## Dataset Structure ### Data Instances The files are comma-separated, with three fields: instruction, input, and output. Note that we do not imply a translation direction; the dataset is treated as symmetric and can be used as a test set in both directions. ### Data Splits Only the Train data has been prepared so far. ## Dataset Creation ### Curation Rationale This dataset will be continuously updated and will be publicly released on GitHub in the future. High language coverage is the main goal of this project, and the dataset is prepared consistently and systematically, with standardized language labels and distribution formats. ### Source Data #### Initial Data Collection and Normalization The OpenSubtitles dataset was compiled from [P. Lison and J. Tiedemann, 2016, OpenSubtitles2016: Extracting Large Parallel Corpora from Movie and TV Subtitles](http://www.lrec-conf.org/proceedings/lrec2016/pdf/947_Paper.pdf) and is hosted by [OPUS](https://opus.nlpl.eu). #### Who are the source language producers? These transcripts are owned by [OpenSubtitles](http://www.opensubtitles.org/). ### Personal and Sensitive Information For information about the handling of personal and sensitive information, please consult the [original provider](http://www.opensubtitles.org/) of the data. The dataset has not been processed in any way to detect or remove potentially sensitive or personal information. ### Social Impact of Dataset Language coverage is high, so the dataset is a very valuable resource for machine-translation development, especially for lower-resource languages and language pairs. The continuously growing database is also a dynamic resource whose value will keep growing. ### Other Known Limitations The sentences are typically short and therefore easy to translate. For high-resource languages, this makes the results less useful than more challenging benchmarks. For lower-resource language pairs, the limited complexity of the examples is actually a good thing for measuring progress, even in very challenging settings. ### Dataset Curators This dataset was created by Heng-Shiou Sheu. ### Licensing Information If you use any part of this corpus in your own work, please cite the following article: P. Lison and J. Tiedemann, 2016, OpenSubtitles2016: Extracting Large Parallel Corpora from Movie and TV Subtitles. In Proceedings of the 10th International Conference on Language Resources and Evaluation (LREC 2016). ### Citation Information ``` @inproceedings{Heng666/OpenSubtitles-TW-Corpus, title={Taiwanese Phrases Multilingual Translation Dataset from OpenSubtitles Talks}, author={Heng-Shiou Sheu}, year={2024}, url={https://huggingface.co/datasets/Heng666/OpenSubtitles-TW-Corpus}, } ```
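Using one of the configuration names from the repository metadata (e.g. `en-zh_tw` for the English / Traditional Chinese pair), a minimal loading sketch looks like this; the `instruction` / `input` / `output` fields are the ones described under Data Instances above:

```python
from datasets import load_dataset

# Other configs follow the same pattern: ja-zh_tw, ko-zh_tw, id-zh_tw, vi-zh_tw, th-zh_tw.
ds = load_dataset("Heng666/OpenSubtitles-TW-Corpus", "en-zh_tw", split="train")

example = ds[0]
print(example["instruction"])
print(example["input"])
print(example["output"])
```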
Heng666/OpenSubtitles-TW-Corpus
[ "task_categories:translation", "size_categories:1M<n<10M", "language:tw", "language:en", "language:ja", "language:ko", "language:id", "language:vi", "language:th", "license:unknown", "translation", "OPUS", "OpenSubtitles", "region:us" ]
2024-02-02T11:17:34+00:00
{"language": ["tw", "en", "ja", "ko", "id", "vi", "th"], "license": "unknown", "size_categories": ["1M<n<10M"], "task_categories": ["translation"], "pretty_name": "Heng666/OpenSubtitles-TW-Corpus", "dataset_info": [{"config_name": "en-zh_tw", "features": [{"name": "instruction", "dtype": "string"}, {"name": "input", "dtype": "string"}, {"name": "output", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 551312007, "num_examples": 3762380}], "download_size": 198513620, "dataset_size": 551312007}, {"config_name": "id-zh_tw", "features": [{"name": "instruction", "dtype": "string"}, {"name": "input", "dtype": "string"}, {"name": "output", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 195956760, "num_examples": 1259138}], "download_size": 69062423, "dataset_size": 195956760}, {"config_name": "ja-zh_tw", "features": [{"name": "instruction", "dtype": "string"}, {"name": "input", "dtype": "string"}, {"name": "output", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 82957644, "num_examples": 558832}], "download_size": 28947031, "dataset_size": 82957644}, {"config_name": "ko-zh_tw", "features": [{"name": "instruction", "dtype": "string"}, {"name": "input", "dtype": "string"}, {"name": "output", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 43408491, "num_examples": 298466}], "download_size": 15889266, "dataset_size": 43408491}, {"config_name": "th-zh_tw", "features": [{"name": "instruction", "dtype": "string"}, {"name": "input", "dtype": "string"}, {"name": "output", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 98383808, "num_examples": 521847}], "download_size": 33136242, "dataset_size": 98383808}, {"config_name": "vi-zh_tw", "features": [{"name": "instruction", "dtype": "string"}, {"name": "input", "dtype": "string"}, {"name": "output", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 131013253, "num_examples": 818723}], "download_size": 45507786, "dataset_size": 131013253}], "configs": [{"config_name": "en-zh_tw", "data_files": [{"split": "train", "path": "en-zh_tw/train-*"}]}, {"config_name": "id-zh_tw", "data_files": [{"split": "train", "path": "id-zh_tw/train-*"}]}, {"config_name": "ja-zh_tw", "data_files": [{"split": "train", "path": "ja-zh_tw/train-*"}]}, {"config_name": "ko-zh_tw", "data_files": [{"split": "train", "path": "ko-zh_tw/train-*"}]}, {"config_name": "th-zh_tw", "data_files": [{"split": "train", "path": "th-zh_tw/train-*"}]}, {"config_name": "vi-zh_tw", "data_files": [{"split": "train", "path": "vi-zh_tw/train-*"}]}], "tags": ["translation", "OPUS", "OpenSubtitles"]}
2024-02-02T13:00:22+00:00
[]
[ "tw", "en", "ja", "ko", "id", "vi", "th" ]
TAGS #task_categories-translation #size_categories-1M<n<10M #language-Twi #language-English #language-Japanese #language-Korean #language-Indonesian #language-Vietnamese #language-Thai #license-unknown #translation #OPUS #OpenSubtitles #region-us
# Dataset Card for [OpenSubtitles-TW-Corpus] ## Table of Contents - Table of Contents - Dataset Description - Dataset Summary - Supported Tasks and Leaderboards - Languages - Dataset Structure - Data Instances - Data Fields - Data Splits - Dataset Creation - Curation Rationale - Source Data - Annotations - Personal and Sensitive Information - Considerations for Using the Data - Social Impact of Dataset - Discussion of Biases - Other Known Limitations - Additional Information - Dataset Curators - Licensing Information - Citation Information - Contributions ## Dataset Description - Homepage: - Repository: - Paper: - Leaderboard: - Point of Contact: Heng-Shiou Sheu ### Dataset Summary OpenSubtitles-TW-Corpus is a multilingual machine-translation benchmark dataset, derived from the user-contributed translations collected by OpenSubtitles and made available by OPUS. The dataset includes test and development data sorted by language pair. It includes test sets for hundreds of language pairs and is continuously updated. Please check the version tag to cite the exact version you are using. This is a slightly cleaner version of the subtitle collection, with improved sentence alignment and better language checking. ### Supported Tasks and Leaderboards ### Languages The dataset covers hundreds of languages and language pairs and is organized by ISO-639-1 language codes. The current release covers the following languages: Traditional Chinese, English, Japanese, Korean, Indonesian, Vietnamese, and Thai. ## Dataset Structure ### Data Instances The files are comma-separated, with three fields: instruction, input, and output. Note that we do not imply a translation direction; the dataset is treated as symmetric and can be used as a test set in both directions. ### Data Splits Only the Train data has been prepared so far. ## Dataset Creation ### Curation Rationale This dataset will be continuously updated and will be publicly released on GitHub in the future. High language coverage is the main goal of this project, and the dataset is prepared consistently and systematically, with standardized language labels and distribution formats. ### Source Data #### Initial Data Collection and Normalization The OpenSubtitles dataset was compiled from P. Lison and J. Tiedemann, 2016, OpenSubtitles2016: Extracting Large Parallel Corpora from Movie and TV Subtitles, and is hosted by OPUS. #### Who are the source language producers? These transcripts are owned by OpenSubtitles. ### Personal and Sensitive Information For information about the handling of personal and sensitive information, please consult the original provider of the data. The dataset has not been processed in any way to detect or remove potentially sensitive or personal information. ### Social Impact of Dataset Language coverage is high, so the dataset is a very valuable resource for machine-translation development, especially for lower-resource languages and language pairs. The continuously growing database is also a dynamic resource whose value will keep growing. ### Other Known Limitations The sentences are typically short and therefore easy to translate. For high-resource languages, this makes the results less useful than more challenging benchmarks. For lower-resource language pairs, the limited complexity of the examples is actually a good thing for measuring progress, even in very challenging settings. ### Dataset Curators This dataset was created by Heng-Shiou Sheu. ### Licensing Information If you use any part of this corpus in your own work, please cite the following article: P. Lison and J. Tiedemann, 2016, OpenSubtitles2016: Extracting Large Parallel Corpora from Movie and TV Subtitles. In Proceedings of the 10th International Conference on Language Resources and Evaluation (LREC 2016).
[ "# Dataset Card for [OpenSubtitles-TW-Corpus]", "## Table of Contents\n- Table of Contents\n- Dataset Description\n - Dataset Summary\n - Supported Tasks and Leaderboards\n - Languages\n- Dataset Structure\n - Data Instances\n - Data Fields\n - Data Splits\n- Dataset Creation\n - Curation Rationale\n - Source Data\n - Annotations\n - Personal and Sensitive Information\n- Considerations for Using the Data\n - Social Impact of Dataset\n - Discussion of Biases\n - Other Known Limitations\n- Additional Information\n - Dataset Curators\n - Licensing Information\n - Citation Information\n - Contributions", "## Dataset Description\n- Homepage: \n- Repository: \n- Paper: \n- Leaderboard:\n- Point of Contact: Heng-Shiou Sheu", "### Dataset Summary\n\nOpenSubtitles-TW-Corpus 是一個機器翻譯基準的多語言資料集,源自 OpenSubtitles 收集的使用者貢獻的翻譯,並由 OPUS。該資料集包括按語言對排序的測試和開發資料。它包括數百種語言對的測試集,並且不斷更新。請檢查版本號標籤以引用您正在使用的版本。\n這是字幕集合的稍微乾淨的版本,使用改進的句子對齊和更好的語言檢查。", "### Supported Tasks and Leaderboards", "### Languages\n此資料集涵蓋數百種語言和語言對,並按 ISO-639-1 語言組織。目前版本涵蓋以下語言。繁體中文、英文、日文、韓文、印尼文、越南文、泰文", "## Dataset Structure", "### Data Instances\n\n資料以 , 分隔檔案中內容,具有三個欄位:指示、輸入和輸出。請注意,我們並不暗示平移方向,並認為資料集是對稱的並用作兩個方向的測試集。", "### Data Splits\n先整理出 Train 資料。", "## Dataset Creation", "### Curation Rationale\n本資料集將持續更新,未來將公開發佈於 Github 當中。高語言覆蓋率是本計畫的主要目標,資料集的準備與標準化語言標籤和分發格式保持一致和系統化。", "### Source Data", "#### Initial Data Collection and Normalization\nOpenSubtitles 資料集是從 P. Lison and J. Tiedemann, 2016, OpenSubtitles2016: Extracting Large Parallel Corpora from Movie and TV Subtitles進行整理,並由 OPUS 收納。", "#### Who are the source language producers?\n這些轉錄本由 OpenSubtitles 所有", "### Personal and Sensitive Information\n有關處理個人資訊和敏感資訊的信息,我們請諮詢資料的原始提供者。該資料集未經過任何方式處理以檢測或刪除潛在的敏感資訊或個人資訊。", "### Social Impact of Dataset\n語言覆蓋率很高,因此它代表了機器翻譯開發的非常有價值的資源,特別是對於資源較少的語言和語言對。不斷成長的資料庫也代表著一種動態資源,其價值將進一步成長。", "### Other Known Limitations\n這些句子通常很短,因此很容易翻譯。對於高資源語言,這會導致結果不如更具挑戰性的基準有用。對於資源較少的語言對來說,即使在非常具有挑戰性的設定中,範例的有限複雜性實際上也是衡量進度的一件好事。", "### Dataset Curators\n此資料集由Heng-Shiou Sheu 製作。", "### Licensing Information\n如果您在自己的工作中使用該語料庫的任何部分,請引用以下文章:P. Lison 和 J. Tiedemann,2016,OpenSubtitles2016:從電影和電視字幕中提取大型並行語料庫。第十屆國際語言資源與評估會議 (LREC 2016) 論文集" ]
[ "TAGS\n#task_categories-translation #size_categories-1M<n<10M #language-Twi #language-English #language-Japanese #language-Korean #language-Indonesian #language-Vietnamese #language-Thai #license-unknown #translation #OPUS #OpenSubtitles #region-us \n", "# Dataset Card for [OpenSubtitles-TW-Corpus]", "## Table of Contents\n- Table of Contents\n- Dataset Description\n - Dataset Summary\n - Supported Tasks and Leaderboards\n - Languages\n- Dataset Structure\n - Data Instances\n - Data Fields\n - Data Splits\n- Dataset Creation\n - Curation Rationale\n - Source Data\n - Annotations\n - Personal and Sensitive Information\n- Considerations for Using the Data\n - Social Impact of Dataset\n - Discussion of Biases\n - Other Known Limitations\n- Additional Information\n - Dataset Curators\n - Licensing Information\n - Citation Information\n - Contributions", "## Dataset Description\n- Homepage: \n- Repository: \n- Paper: \n- Leaderboard:\n- Point of Contact: Heng-Shiou Sheu", "### Dataset Summary\n\nOpenSubtitles-TW-Corpus 是一個機器翻譯基準的多語言資料集,源自 OpenSubtitles 收集的使用者貢獻的翻譯,並由 OPUS。該資料集包括按語言對排序的測試和開發資料。它包括數百種語言對的測試集,並且不斷更新。請檢查版本號標籤以引用您正在使用的版本。\n這是字幕集合的稍微乾淨的版本,使用改進的句子對齊和更好的語言檢查。", "### Supported Tasks and Leaderboards", "### Languages\n此資料集涵蓋數百種語言和語言對,並按 ISO-639-1 語言組織。目前版本涵蓋以下語言。繁體中文、英文、日文、韓文、印尼文、越南文、泰文", "## Dataset Structure", "### Data Instances\n\n資料以 , 分隔檔案中內容,具有三個欄位:指示、輸入和輸出。請注意,我們並不暗示平移方向,並認為資料集是對稱的並用作兩個方向的測試集。", "### Data Splits\n先整理出 Train 資料。", "## Dataset Creation", "### Curation Rationale\n本資料集將持續更新,未來將公開發佈於 Github 當中。高語言覆蓋率是本計畫的主要目標,資料集的準備與標準化語言標籤和分發格式保持一致和系統化。", "### Source Data", "#### Initial Data Collection and Normalization\nOpenSubtitles 資料集是從 P. Lison and J. Tiedemann, 2016, OpenSubtitles2016: Extracting Large Parallel Corpora from Movie and TV Subtitles進行整理,並由 OPUS 收納。", "#### Who are the source language producers?\n這些轉錄本由 OpenSubtitles 所有", "### Personal and Sensitive Information\n有關處理個人資訊和敏感資訊的信息,我們請諮詢資料的原始提供者。該資料集未經過任何方式處理以檢測或刪除潛在的敏感資訊或個人資訊。", "### Social Impact of Dataset\n語言覆蓋率很高,因此它代表了機器翻譯開發的非常有價值的資源,特別是對於資源較少的語言和語言對。不斷成長的資料庫也代表著一種動態資源,其價值將進一步成長。", "### Other Known Limitations\n這些句子通常很短,因此很容易翻譯。對於高資源語言,這會導致結果不如更具挑戰性的基準有用。對於資源較少的語言對來說,即使在非常具有挑戰性的設定中,範例的有限複雜性實際上也是衡量進度的一件好事。", "### Dataset Curators\n此資料集由Heng-Shiou Sheu 製作。", "### Licensing Information\n如果您在自己的工作中使用該語料庫的任何部分,請引用以下文章:P. Lison 和 J. Tiedemann,2016,OpenSubtitles2016:從電影和電視字幕中提取大型並行語料庫。第十屆國際語言資源與評估會議 (LREC 2016) 論文集" ]
823cc40b5bb444570675abc218adbd10bea5cb98
Created with [this script](https://gist.github.com/xzuyn/5807bbc2a305590f7b49b879dc0354ad), so I assume everything is 100% correct (with rounding).
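Since the card documents only the generation script and not the schema, a schema-agnostic sketch is safest: load the data and introspect the features rather than assuming field names (the `train` split name is itself an assumption):

```python
from datasets import load_dataset

# "train" is an assumed split name; the card does not document the splits.
ds = load_dataset("PJMixers/Math-1K", split="train")

print(ds.features)  # discover the actual column names and types
print(ds[0])        # inspect one generated math example
```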
PJMixers/Math-1K
[ "size_categories:1K<n<10K", "language:en", "math", "region:us" ]
2024-02-02T11:40:55+00:00
{"language": ["en"], "size_categories": ["1K<n<10K"], "tags": ["math"]}
2024-02-02T11:58:28+00:00
[]
[ "en" ]
TAGS #size_categories-1K<n<10K #language-English #math #region-us
Created with this script, so I assume everything is 100% correct (with rounding).
[]
[ "TAGS\n#size_categories-1K<n<10K #language-English #math #region-us \n" ]
407d59feae445c585616c825e0e02bae22e6babd
Created with [this script](https://gist.github.com/xzuyn/5807bbc2a305590f7b49b879dc0354ad), so I assume everything is 100% correct (with rounding).
PJMixers/Math-10K
[ "size_categories:10K<n<100K", "language:en", "math", "region:us" ]
2024-02-02T11:42:24+00:00
{"language": ["en"], "size_categories": ["10K<n<100K"], "tags": ["math"]}
2024-02-02T11:59:19+00:00
[]
[ "en" ]
TAGS #size_categories-10K<n<100K #language-English #math #region-us
Created with this script, so I assume everything is 100% correct (with rounding).
[]
[ "TAGS\n#size_categories-10K<n<100K #language-English #math #region-us \n" ]
003f4c57eb89c58f9c4659e00ab419b451148994
Created with [this script](https://gist.github.com/xzuyn/5807bbc2a305590f7b49b879dc0354ad), so I assume everything is 100% correct (with rounding).
PJMixers/Math-100K
[ "size_categories:100K<n<1M", "language:en", "math", "region:us" ]
2024-02-02T11:43:37+00:00
{"language": ["en"], "size_categories": ["100K<n<1M"], "tags": ["math"]}
2024-02-02T11:59:59+00:00
[]
[ "en" ]
TAGS #size_categories-100K<n<1M #language-English #math #region-us
Created with this script, so I assume everything is 100% correct (with rounding).
[]
[ "TAGS\n#size_categories-100K<n<1M #language-English #math #region-us \n" ]
ed7fa0ea4fd47952f4991b19e5fff742bba04bd0
Created with [this script](https://gist.github.com/xzuyn/5807bbc2a305590f7b49b879dc0354ad), so I assume everything is 100% correct (with rounding).
PJMixers/Math-1M
[ "size_categories:1M<n<10M", "language:en", "math", "region:us" ]
2024-02-02T11:44:49+00:00
{"language": ["en"], "size_categories": ["1M<n<10M"], "tags": ["math"]}
2024-02-02T12:01:30+00:00
[]
[ "en" ]
TAGS #size_categories-1M<n<10M #language-English #math #region-us
Created with this script, so I assume everything is 100% correct (with rounding).
[]
[ "TAGS\n#size_categories-1M<n<10M #language-English #math #region-us \n" ]
92b859043d6fd0e81fd9617eb1555f6cfc2724d4
Created with [this script](https://gist.github.com/xzuyn/5807bbc2a305590f7b49b879dc0354ad), so I assume everything is 100% correct (with rounding).
PJMixers/Math-10M
[ "size_categories:10M<n<100M", "language:en", "math", "region:us" ]
2024-02-02T12:02:20+00:00
{"language": ["en"], "size_categories": ["10M<n<100M"], "tags": ["math"]}
2024-02-02T12:32:53+00:00
[]
[ "en" ]
TAGS #size_categories-10M<n<100M #language-English #math #region-us
Created with this script, so I assume everything is 100% correct (with rounding).
[]
[ "TAGS\n#size_categories-10M<n<100M #language-English #math #region-us \n" ]
0aa5c8fa22ac29bf815f71d735d36ea7478bbd30
# Dataset Card for personal_preference_eval ## Dataset Description Dataset for personal preference eval in paper "[Linear Alignment: A Closed-form Solution for Aligning Human Preferences without Tuning and Feedback](https://arxiv.org/abs/2401.11458)" ## Field Description | Field Name | Field Description | | --------------------------- | ---------------------- | | index | Index of data point. | | domain | Domain of question. | | question | User query. | | preference_a | Description of user_a. | | preference_b | Description of user_b. | | preference_c | Description of user_c. | | preference_d | Description of user_d. | | answer_a | GPT-4's response to user_a's query. | | answer_b | GPT-4's response to user_b's query. | | answer_c | GPT-4's response to user_c's query. | | answer_d | GPT-4's response to user_d's query. |
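Based on the field table above, a minimal sketch pairing each user's preference description with GPT-4's tailored answer might look like this (the `train` split name is an assumption, as the card does not list the splits):

```python
from datasets import load_dataset

# "train" is an assumed split name; the card does not document the splits.
ds = load_dataset("kkuusou/personal_preference_eval", split="train")

row = ds[0]
print(row["domain"], "-", row["question"])
for user in ("a", "b", "c", "d"):
    print(f"preference_{user}:", row[f"preference_{user}"])
    print(f"answer_{user}:", row[f"answer_{user}"])
```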
kkuusou/personal_preference_eval
[ "size_categories:n<1K", "language:en", "license:mit", "arxiv:2401.11458", "region:us" ]
2024-02-02T12:16:23+00:00
{"language": ["en"], "license": "mit", "size_categories": ["n<1K"]}
2024-02-02T12:51:03+00:00
[ "2401.11458" ]
[ "en" ]
TAGS #size_categories-n<1K #language-English #license-mit #arxiv-2401.11458 #region-us
Dataset Card for personal\_preference\_eval =========================================== Dataset Description ------------------- Dataset for personal preference eval in paper "Linear Alignment: A Closed-form Solution for Aligning Human Preferences without Tuning and Feedback" Field Description -----------------
[]
[ "TAGS\n#size_categories-n<1K #language-English #license-mit #arxiv-2401.11458 #region-us \n" ]
cc30920f82ab5dfc05e6fc31198400e21a2724a6
# Dataset Card for Evaluation run of nisten/BigCodeLlama-92b <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [nisten/BigCodeLlama-92b](https://huggingface.co/nisten/BigCodeLlama-92b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_nisten__BigCodeLlama-92b", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-02-02T12:17:18.661697](https://huggingface.co/datasets/open-llm-leaderboard/details_nisten__BigCodeLlama-92b/blob/main/results_2024-02-02T12-17-18.661697.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.5550751843237683, "acc_stderr": 0.034097312071109324, "acc_norm": 0.5577029118828865, "acc_norm_stderr": 0.03479549689455188, "mc1": 0.3574051407588739, "mc1_stderr": 0.0167765996767294, "mc2": 0.5133974088327335, "mc2_stderr": 0.015193794273863215 }, "harness|arc:challenge|25": { "acc": 0.5196245733788396, "acc_stderr": 0.014600132075947105, "acc_norm": 0.5477815699658704, "acc_norm_stderr": 0.01454451988063383 }, "harness|hellaswag|10": { "acc": 0.5812587134037045, "acc_stderr": 0.00492344562786152, "acc_norm": 0.7784305915156343, "acc_norm_stderr": 0.004144540263219887 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.31, "acc_stderr": 0.046482319871173156, "acc_norm": 0.31, "acc_norm_stderr": 0.046482319871173156 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.5111111111111111, "acc_stderr": 0.04318275491977976, "acc_norm": 0.5111111111111111, "acc_norm_stderr": 0.04318275491977976 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.5526315789473685, "acc_stderr": 0.040463368839782514, "acc_norm": 0.5526315789473685, "acc_norm_stderr": 0.040463368839782514 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.61, "acc_stderr": 0.04902071300001975, "acc_norm": 0.61, "acc_norm_stderr": 0.04902071300001975 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.5283018867924528, "acc_stderr": 0.030723535249006107, "acc_norm": 0.5283018867924528, "acc_norm_stderr": 0.030723535249006107 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.5069444444444444, "acc_stderr": 0.041808067502949374, "acc_norm": 0.5069444444444444, "acc_norm_stderr": 0.041808067502949374 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.35, "acc_stderr": 0.0479372485441102, "acc_norm": 0.35, "acc_norm_stderr": 0.0479372485441102 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.44, "acc_stderr": 0.0498887651569859, "acc_norm": 0.44, "acc_norm_stderr": 0.0498887651569859 },
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.4682080924855491, "acc_stderr": 0.03804749744364764, "acc_norm": 0.4682080924855491, "acc_norm_stderr": 0.03804749744364764 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.27450980392156865, "acc_stderr": 0.044405219061793275, "acc_norm": 0.27450980392156865, "acc_norm_stderr": 0.044405219061793275 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5276595744680851, "acc_stderr": 0.03263597118409769, "acc_norm": 0.5276595744680851, "acc_norm_stderr": 0.03263597118409769 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.39473684210526316, "acc_stderr": 0.045981880578165414, "acc_norm": 0.39473684210526316, "acc_norm_stderr": 0.045981880578165414 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.503448275862069, "acc_stderr": 0.04166567577101579, "acc_norm": 0.503448275862069, "acc_norm_stderr": 0.04166567577101579 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.4074074074074074, "acc_stderr": 0.02530590624159063, "acc_norm": 0.4074074074074074, "acc_norm_stderr": 0.02530590624159063 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.42063492063492064, "acc_stderr": 0.04415438226743744, "acc_norm": 0.42063492063492064, "acc_norm_stderr": 0.04415438226743744 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.38, "acc_stderr": 0.048783173121456316, "acc_norm": 0.38, "acc_norm_stderr": 0.048783173121456316 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.6258064516129033, "acc_stderr": 0.027528904299845697, "acc_norm": 0.6258064516129033, "acc_norm_stderr": 0.027528904299845697 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.4039408866995074, "acc_stderr": 0.0345245390382204, "acc_norm": 0.4039408866995074, "acc_norm_stderr": 0.0345245390382204 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.72, "acc_stderr": 0.045126085985421276, "acc_norm": 0.72, "acc_norm_stderr": 0.045126085985421276 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7393939393939394, "acc_stderr": 0.034277431758165236, "acc_norm": 0.7393939393939394, "acc_norm_stderr": 0.034277431758165236 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.6919191919191919, "acc_stderr": 0.03289477330098615, "acc_norm": 0.6919191919191919, "acc_norm_stderr": 0.03289477330098615 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.7668393782383419, "acc_stderr": 0.03051611137147601, "acc_norm": 0.7668393782383419, "acc_norm_stderr": 0.03051611137147601 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.5102564102564102, "acc_stderr": 0.025345672221942374, "acc_norm": 0.5102564102564102, "acc_norm_stderr": 0.025345672221942374 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.34814814814814815, "acc_stderr": 0.029045600290616258, "acc_norm": 0.34814814814814815, "acc_norm_stderr": 0.029045600290616258 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.5126050420168067, "acc_stderr": 0.03246816765752174, "acc_norm": 0.5126050420168067, "acc_norm_stderr": 0.03246816765752174 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.36423841059602646, "acc_stderr": 
0.03929111781242741, "acc_norm": 0.36423841059602646, "acc_norm_stderr": 0.03929111781242741 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.726605504587156, "acc_stderr": 0.019109299846098292, "acc_norm": 0.726605504587156, "acc_norm_stderr": 0.019109299846098292 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4351851851851852, "acc_stderr": 0.03381200005643525, "acc_norm": 0.4351851851851852, "acc_norm_stderr": 0.03381200005643525 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7009803921568627, "acc_stderr": 0.03213325717373616, "acc_norm": 0.7009803921568627, "acc_norm_stderr": 0.03213325717373616 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7510548523206751, "acc_stderr": 0.028146970599422644, "acc_norm": 0.7510548523206751, "acc_norm_stderr": 0.028146970599422644 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.600896860986547, "acc_stderr": 0.03286745312567961, "acc_norm": 0.600896860986547, "acc_norm_stderr": 0.03286745312567961 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.6106870229007634, "acc_stderr": 0.0427648654281459, "acc_norm": 0.6106870229007634, "acc_norm_stderr": 0.0427648654281459 }, "harness|hendrycksTest-international_law|5": { "acc": 0.6859504132231405, "acc_stderr": 0.04236964753041018, "acc_norm": 0.6859504132231405, "acc_norm_stderr": 0.04236964753041018 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.6851851851851852, "acc_stderr": 0.04489931073591312, "acc_norm": 0.6851851851851852, "acc_norm_stderr": 0.04489931073591312 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.6932515337423313, "acc_stderr": 0.036230899157241474, "acc_norm": 0.6932515337423313, "acc_norm_stderr": 0.036230899157241474 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.44642857142857145, "acc_stderr": 0.04718471485219588, "acc_norm": 0.44642857142857145, "acc_norm_stderr": 0.04718471485219588 }, "harness|hendrycksTest-management|5": { "acc": 0.7184466019417476, "acc_stderr": 0.044532548363264673, "acc_norm": 0.7184466019417476, "acc_norm_stderr": 0.044532548363264673 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8205128205128205, "acc_stderr": 0.02514093595033544, "acc_norm": 0.8205128205128205, "acc_norm_stderr": 0.02514093595033544 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.58, "acc_stderr": 0.04960449637488583, "acc_norm": 0.58, "acc_norm_stderr": 0.04960449637488583 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7049808429118773, "acc_stderr": 0.016308363772932724, "acc_norm": 0.7049808429118773, "acc_norm_stderr": 0.016308363772932724 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.5751445086705202, "acc_stderr": 0.02661335084026174, "acc_norm": 0.5751445086705202, "acc_norm_stderr": 0.02661335084026174 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.329608938547486, "acc_stderr": 0.01572153107518387, "acc_norm": 0.329608938547486, "acc_norm_stderr": 0.01572153107518387 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.5424836601307189, "acc_stderr": 0.02852638345214264, "acc_norm": 0.5424836601307189, "acc_norm_stderr": 0.02852638345214264 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.639871382636656, "acc_stderr": 0.027264297599804015, "acc_norm": 0.639871382636656, "acc_norm_stderr": 0.027264297599804015 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.5895061728395061, "acc_stderr": 0.027371350925124768, "acc_norm": 0.5895061728395061, "acc_norm_stderr": 0.027371350925124768 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.45390070921985815, "acc_stderr": 0.029700453247291484, "acc_norm": 0.45390070921985815, "acc_norm_stderr": 0.029700453247291484 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4074315514993481, "acc_stderr": 0.012549473714212226, "acc_norm": 0.4074315514993481, "acc_norm_stderr": 0.012549473714212226 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.4117647058823529, "acc_stderr": 0.029896163033125468, "acc_norm": 0.4117647058823529, "acc_norm_stderr": 0.029896163033125468 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.5375816993464052, "acc_stderr": 0.02017061497496977, "acc_norm": 0.5375816993464052, "acc_norm_stderr": 0.02017061497496977 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6636363636363637, "acc_stderr": 0.04525393596302506, "acc_norm": 0.6636363636363637, "acc_norm_stderr": 0.04525393596302506 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.6244897959183674, "acc_stderr": 0.03100120903989484, "acc_norm": 0.6244897959183674, "acc_norm_stderr": 0.03100120903989484 }, "harness|hendrycksTest-sociology|5": { "acc": 0.7562189054726368, "acc_stderr": 0.030360490154014645, "acc_norm": 0.7562189054726368, "acc_norm_stderr": 0.030360490154014645 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.75, "acc_stderr": 0.04351941398892446, "acc_norm": 0.75, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-virology|5": { "acc": 0.45180722891566266, "acc_stderr": 0.03874371556587953, "acc_norm": 0.45180722891566266, "acc_norm_stderr": 0.03874371556587953 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.695906432748538, "acc_stderr": 0.035282112582452306, "acc_norm": 0.695906432748538, "acc_norm_stderr": 0.035282112582452306 }, "harness|truthfulqa:mc|0": { "mc1": 0.3574051407588739, "mc1_stderr": 0.0167765996767294, "mc2": 0.5133974088327335, "mc2_stderr": 0.015193794273863215 }, "harness|winogrande|5": { "acc": 0.7308602999210734, "acc_stderr": 0.012464911951268741 }, "harness|gsm8k|5": { "acc": 0.4495830174374526, "acc_stderr": 0.013702290047884745 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
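The card above states that the `results` configuration aggregates every metric and that its `train` split always points to the latest run. A minimal sketch for reading it (the exact record layout inside `results` is not documented, so inspect a row rather than assuming field names):

```python
from datasets import load_dataset

# Per the card, "results" aggregates all metrics and "train" tracks the latest run.
results = load_dataset(
    "open-llm-leaderboard/details_nisten__BigCodeLlama-92b",
    "results",
    split="train",
)
print(results[0])  # inspect the aggregated record layout
```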
open-llm-leaderboard/details_nisten__BigCodeLlama-92b
[ "region:us" ]
2024-02-02T12:19:12+00:00
{"pretty_name": "Evaluation run of nisten/BigCodeLlama-92b", "dataset_summary": "Dataset automatically created during the evaluation run of model [nisten/BigCodeLlama-92b](https://huggingface.co/nisten/BigCodeLlama-92b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_nisten__BigCodeLlama-92b\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-02T12:17:18.661697](https://huggingface.co/datasets/open-llm-leaderboard/details_nisten__BigCodeLlama-92b/blob/main/results_2024-02-02T12-17-18.661697.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5550751843237683,\n \"acc_stderr\": 0.034097312071109324,\n \"acc_norm\": 0.5577029118828865,\n \"acc_norm_stderr\": 0.03479549689455188,\n \"mc1\": 0.3574051407588739,\n \"mc1_stderr\": 0.0167765996767294,\n \"mc2\": 0.5133974088327335,\n \"mc2_stderr\": 0.015193794273863215\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5196245733788396,\n \"acc_stderr\": 0.014600132075947105,\n \"acc_norm\": 0.5477815699658704,\n \"acc_norm_stderr\": 0.01454451988063383\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.5812587134037045,\n \"acc_stderr\": 0.00492344562786152,\n \"acc_norm\": 0.7784305915156343,\n \"acc_norm_stderr\": 0.004144540263219887\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.046482319871173156,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.046482319871173156\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5111111111111111,\n \"acc_stderr\": 0.04318275491977976,\n \"acc_norm\": 0.5111111111111111,\n \"acc_norm_stderr\": 0.04318275491977976\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.5526315789473685,\n \"acc_stderr\": 0.040463368839782514,\n \"acc_norm\": 0.5526315789473685,\n \"acc_norm_stderr\": 0.040463368839782514\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.61,\n \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\": 0.61,\n \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.5283018867924528,\n \"acc_stderr\": 0.030723535249006107,\n \"acc_norm\": 0.5283018867924528,\n \"acc_norm_stderr\": 0.030723535249006107\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.5069444444444444,\n \"acc_stderr\": 0.041808067502949374,\n \"acc_norm\": 0.5069444444444444,\n \"acc_norm_stderr\": 0.041808067502949374\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 
0.0479372485441102,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.44,\n \"acc_stderr\": 0.0498887651569859,\n \"acc_norm\": 0.44,\n \"acc_norm_stderr\": 0.0498887651569859\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.42,\n \"acc_stderr\": 0.049604496374885836,\n \"acc_norm\": 0.42,\n \"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.4682080924855491,\n \"acc_stderr\": 0.03804749744364764,\n \"acc_norm\": 0.4682080924855491,\n \"acc_norm_stderr\": 0.03804749744364764\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.27450980392156865,\n \"acc_stderr\": 0.044405219061793275,\n \"acc_norm\": 0.27450980392156865,\n \"acc_norm_stderr\": 0.044405219061793275\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5276595744680851,\n \"acc_stderr\": 0.03263597118409769,\n \"acc_norm\": 0.5276595744680851,\n \"acc_norm_stderr\": 0.03263597118409769\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.39473684210526316,\n \"acc_stderr\": 0.045981880578165414,\n \"acc_norm\": 0.39473684210526316,\n \"acc_norm_stderr\": 0.045981880578165414\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.503448275862069,\n \"acc_stderr\": 0.04166567577101579,\n \"acc_norm\": 0.503448275862069,\n \"acc_norm_stderr\": 0.04166567577101579\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.4074074074074074,\n \"acc_stderr\": 0.02530590624159063,\n \"acc_norm\": 0.4074074074074074,\n \"acc_norm_stderr\": 0.02530590624159063\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.42063492063492064,\n \"acc_stderr\": 0.04415438226743744,\n \"acc_norm\": 0.42063492063492064,\n \"acc_norm_stderr\": 0.04415438226743744\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.38,\n \"acc_stderr\": 0.048783173121456316,\n \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.048783173121456316\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.6258064516129033,\n \"acc_stderr\": 0.027528904299845697,\n \"acc_norm\": 0.6258064516129033,\n \"acc_norm_stderr\": 0.027528904299845697\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.4039408866995074,\n \"acc_stderr\": 0.0345245390382204,\n \"acc_norm\": 0.4039408866995074,\n \"acc_norm_stderr\": 0.0345245390382204\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.045126085985421276,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.045126085985421276\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7393939393939394,\n \"acc_stderr\": 0.034277431758165236,\n \"acc_norm\": 0.7393939393939394,\n \"acc_norm_stderr\": 0.034277431758165236\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.6919191919191919,\n \"acc_stderr\": 0.03289477330098615,\n \"acc_norm\": 0.6919191919191919,\n \"acc_norm_stderr\": 0.03289477330098615\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.7668393782383419,\n \"acc_stderr\": 0.03051611137147601,\n \"acc_norm\": 0.7668393782383419,\n \"acc_norm_stderr\": 0.03051611137147601\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.5102564102564102,\n \"acc_stderr\": 0.025345672221942374,\n \"acc_norm\": 0.5102564102564102,\n \"acc_norm_stderr\": 0.025345672221942374\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.34814814814814815,\n \"acc_stderr\": 0.029045600290616258,\n \"acc_norm\": 0.34814814814814815,\n \"acc_norm_stderr\": 0.029045600290616258\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.5126050420168067,\n \"acc_stderr\": 0.03246816765752174,\n \"acc_norm\": 0.5126050420168067,\n \"acc_norm_stderr\": 0.03246816765752174\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.36423841059602646,\n \"acc_stderr\": 0.03929111781242741,\n \"acc_norm\": 0.36423841059602646,\n \"acc_norm_stderr\": 0.03929111781242741\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.726605504587156,\n \"acc_stderr\": 0.019109299846098292,\n \"acc_norm\": 0.726605504587156,\n \"acc_norm_stderr\": 0.019109299846098292\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.4351851851851852,\n \"acc_stderr\": 0.03381200005643525,\n \"acc_norm\": 0.4351851851851852,\n \"acc_norm_stderr\": 0.03381200005643525\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7009803921568627,\n \"acc_stderr\": 0.03213325717373616,\n \"acc_norm\": 0.7009803921568627,\n \"acc_norm_stderr\": 0.03213325717373616\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7510548523206751,\n \"acc_stderr\": 0.028146970599422644,\n \"acc_norm\": 0.7510548523206751,\n \"acc_norm_stderr\": 0.028146970599422644\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.600896860986547,\n \"acc_stderr\": 0.03286745312567961,\n \"acc_norm\": 0.600896860986547,\n \"acc_norm_stderr\": 0.03286745312567961\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.6106870229007634,\n \"acc_stderr\": 0.0427648654281459,\n \"acc_norm\": 0.6106870229007634,\n \"acc_norm_stderr\": 0.0427648654281459\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.6859504132231405,\n \"acc_stderr\": 0.04236964753041018,\n \"acc_norm\": 0.6859504132231405,\n \"acc_norm_stderr\": 0.04236964753041018\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.6851851851851852,\n \"acc_stderr\": 0.04489931073591312,\n \"acc_norm\": 0.6851851851851852,\n \"acc_norm_stderr\": 0.04489931073591312\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.6932515337423313,\n \"acc_stderr\": 0.036230899157241474,\n \"acc_norm\": 0.6932515337423313,\n \"acc_norm_stderr\": 0.036230899157241474\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.44642857142857145,\n \"acc_stderr\": 0.04718471485219588,\n \"acc_norm\": 0.44642857142857145,\n \"acc_norm_stderr\": 0.04718471485219588\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7184466019417476,\n \"acc_stderr\": 0.044532548363264673,\n \"acc_norm\": 0.7184466019417476,\n \"acc_norm_stderr\": 0.044532548363264673\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8205128205128205,\n \"acc_stderr\": 0.02514093595033544,\n \"acc_norm\": 0.8205128205128205,\n \"acc_norm_stderr\": 0.02514093595033544\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.58,\n \"acc_stderr\": 0.04960449637488583,\n \"acc_norm\": 0.58,\n \"acc_norm_stderr\": 0.04960449637488583\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.7049808429118773,\n \"acc_stderr\": 0.016308363772932724,\n \"acc_norm\": 0.7049808429118773,\n \"acc_norm_stderr\": 0.016308363772932724\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.5751445086705202,\n \"acc_stderr\": 0.02661335084026174,\n \"acc_norm\": 0.5751445086705202,\n \"acc_norm_stderr\": 0.02661335084026174\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.329608938547486,\n \"acc_stderr\": 0.01572153107518387,\n \"acc_norm\": 0.329608938547486,\n \"acc_norm_stderr\": 0.01572153107518387\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.5424836601307189,\n \"acc_stderr\": 0.02852638345214264,\n \"acc_norm\": 0.5424836601307189,\n \"acc_norm_stderr\": 0.02852638345214264\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.639871382636656,\n \"acc_stderr\": 0.027264297599804015,\n \"acc_norm\": 0.639871382636656,\n \"acc_norm_stderr\": 0.027264297599804015\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.5895061728395061,\n \"acc_stderr\": 0.027371350925124768,\n \"acc_norm\": 0.5895061728395061,\n \"acc_norm_stderr\": 0.027371350925124768\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.45390070921985815,\n \"acc_stderr\": 0.029700453247291484,\n \"acc_norm\": 0.45390070921985815,\n \"acc_norm_stderr\": 0.029700453247291484\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4074315514993481,\n \"acc_stderr\": 0.012549473714212226,\n \"acc_norm\": 0.4074315514993481,\n \"acc_norm_stderr\": 0.012549473714212226\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.4117647058823529,\n \"acc_stderr\": 0.029896163033125468,\n \"acc_norm\": 0.4117647058823529,\n \"acc_norm_stderr\": 0.029896163033125468\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.5375816993464052,\n \"acc_stderr\": 0.02017061497496977,\n \"acc_norm\": 0.5375816993464052,\n \"acc_norm_stderr\": 0.02017061497496977\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6636363636363637,\n \"acc_stderr\": 0.04525393596302506,\n \"acc_norm\": 0.6636363636363637,\n \"acc_norm_stderr\": 0.04525393596302506\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.6244897959183674,\n \"acc_stderr\": 0.03100120903989484,\n \"acc_norm\": 0.6244897959183674,\n \"acc_norm_stderr\": 0.03100120903989484\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.7562189054726368,\n \"acc_stderr\": 0.030360490154014645,\n \"acc_norm\": 0.7562189054726368,\n \"acc_norm_stderr\": 0.030360490154014645\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.45180722891566266,\n \"acc_stderr\": 0.03874371556587953,\n \"acc_norm\": 0.45180722891566266,\n \"acc_norm_stderr\": 0.03874371556587953\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.695906432748538,\n \"acc_stderr\": 0.035282112582452306,\n \"acc_norm\": 0.695906432748538,\n \"acc_norm_stderr\": 0.035282112582452306\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.3574051407588739,\n \"mc1_stderr\": 0.0167765996767294,\n \"mc2\": 0.5133974088327335,\n \"mc2_stderr\": 0.015193794273863215\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7308602999210734,\n \"acc_stderr\": 0.012464911951268741\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.4495830174374526,\n \"acc_stderr\": 0.013702290047884745\n 
}\n}\n```", "repo_url": "https://huggingface.co/nisten/BigCodeLlama-92b", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_02T12_17_18.661697", "path": ["**/details_harness|arc:challenge|25_2024-02-02T12-17-18.661697.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-02T12-17-18.661697.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_02T12_17_18.661697", "path": ["**/details_harness|gsm8k|5_2024-02-02T12-17-18.661697.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-02T12-17-18.661697.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_02T12_17_18.661697", "path": ["**/details_harness|hellaswag|10_2024-02-02T12-17-18.661697.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-02T12-17-18.661697.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_02T12_17_18.661697", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T12-17-18.661697.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T12-17-18.661697.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T12-17-18.661697.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T12-17-18.661697.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T12-17-18.661697.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T12-17-18.661697.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_02T12_17_18.661697", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T12-17-18.661697.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T12-17-18.661697.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_02T12_17_18.661697", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T12-17-18.661697.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T12-17-18.661697.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_02T12_17_18.661697", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T12-17-18.661697.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T12-17-18.661697.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_02T12_17_18.661697", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T12-17-18.661697.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T12-17-18.661697.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_02T12_17_18.661697", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T12-17-18.661697.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T12-17-18.661697.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_02T12_17_18.661697", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T12-17-18.661697.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T12-17-18.661697.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_02T12_17_18.661697", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T12-17-18.661697.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T12-17-18.661697.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_02_02T12_17_18.661697", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T12-17-18.661697.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T12-17-18.661697.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_02T12_17_18.661697", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T12-17-18.661697.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T12-17-18.661697.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_02T12_17_18.661697", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T12-17-18.661697.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T12-17-18.661697.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_02T12_17_18.661697", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T12-17-18.661697.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T12-17-18.661697.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_02T12_17_18.661697", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T12-17-18.661697.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T12-17-18.661697.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_02T12_17_18.661697", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T12-17-18.661697.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T12-17-18.661697.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_02T12_17_18.661697", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T12-17-18.661697.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T12-17-18.661697.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_02T12_17_18.661697", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T12-17-18.661697.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T12-17-18.661697.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_02T12_17_18.661697", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T12-17-18.661697.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T12-17-18.661697.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_02T12_17_18.661697", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T12-17-18.661697.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T12-17-18.661697.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_02T12_17_18.661697", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T12-17-18.661697.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T12-17-18.661697.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_02T12_17_18.661697", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T12-17-18.661697.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T12-17-18.661697.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_02T12_17_18.661697", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T12-17-18.661697.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T12-17-18.661697.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_02T12_17_18.661697", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T12-17-18.661697.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T12-17-18.661697.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_02T12_17_18.661697", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T12-17-18.661697.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T12-17-18.661697.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_02T12_17_18.661697", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T12-17-18.661697.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T12-17-18.661697.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_02T12_17_18.661697", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T12-17-18.661697.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T12-17-18.661697.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_02T12_17_18.661697", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T12-17-18.661697.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T12-17-18.661697.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_02T12_17_18.661697", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T12-17-18.661697.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T12-17-18.661697.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_02T12_17_18.661697", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T12-17-18.661697.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T12-17-18.661697.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_02T12_17_18.661697", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T12-17-18.661697.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T12-17-18.661697.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_02T12_17_18.661697", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T12-17-18.661697.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T12-17-18.661697.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_02T12_17_18.661697", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T12-17-18.661697.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T12-17-18.661697.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_02T12_17_18.661697", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T12-17-18.661697.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T12-17-18.661697.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_02T12_17_18.661697", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T12-17-18.661697.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T12-17-18.661697.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_02T12_17_18.661697", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T12-17-18.661697.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T12-17-18.661697.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_02T12_17_18.661697", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T12-17-18.661697.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T12-17-18.661697.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_02T12_17_18.661697", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T12-17-18.661697.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T12-17-18.661697.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_02T12_17_18.661697", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T12-17-18.661697.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T12-17-18.661697.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_02T12_17_18.661697", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T12-17-18.661697.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T12-17-18.661697.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_02_02T12_17_18.661697", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T12-17-18.661697.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T12-17-18.661697.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_02T12_17_18.661697", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T12-17-18.661697.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T12-17-18.661697.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_02T12_17_18.661697", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T12-17-18.661697.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T12-17-18.661697.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_02T12_17_18.661697", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T12-17-18.661697.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T12-17-18.661697.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_02T12_17_18.661697", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T12-17-18.661697.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T12-17-18.661697.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_02T12_17_18.661697", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T12-17-18.661697.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T12-17-18.661697.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_02T12_17_18.661697", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T12-17-18.661697.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T12-17-18.661697.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_02T12_17_18.661697", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T12-17-18.661697.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T12-17-18.661697.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_02T12_17_18.661697", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T12-17-18.661697.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T12-17-18.661697.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_02T12_17_18.661697", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T12-17-18.661697.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T12-17-18.661697.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_02T12_17_18.661697", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T12-17-18.661697.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T12-17-18.661697.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_02T12_17_18.661697", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T12-17-18.661697.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T12-17-18.661697.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_02T12_17_18.661697", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T12-17-18.661697.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T12-17-18.661697.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_02T12_17_18.661697", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T12-17-18.661697.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T12-17-18.661697.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_02T12_17_18.661697", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T12-17-18.661697.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T12-17-18.661697.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_02T12_17_18.661697", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T12-17-18.661697.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T12-17-18.661697.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_02T12_17_18.661697", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T12-17-18.661697.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T12-17-18.661697.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_02T12_17_18.661697", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T12-17-18.661697.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T12-17-18.661697.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_02T12_17_18.661697", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T12-17-18.661697.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T12-17-18.661697.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_02T12_17_18.661697", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T12-17-18.661697.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T12-17-18.661697.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_02T12_17_18.661697", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T12-17-18.661697.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T12-17-18.661697.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_02T12_17_18.661697", "path": ["**/details_harness|winogrande|5_2024-02-02T12-17-18.661697.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-02-02T12-17-18.661697.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_02_02T12_17_18.661697", "path": ["results_2024-02-02T12-17-18.661697.parquet"]}, {"split": "latest", "path": ["results_2024-02-02T12-17-18.661697.parquet"]}]}]}
2024-02-02T12:19:44+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of nisten/BigCodeLlama-92b Dataset automatically created during the evaluation run of model nisten/BigCodeLlama-92b on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-02-02T12:17:18.661697 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
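The loading snippet this flattened card refers to was stripped during text processing; a minimal reconstruction following the convention used by the sibling cards (the choice of config is arbitrary, and the repo id is inferred as above):

```python
from datasets import load_dataset

# Reconstructed loading call; repo id inferred from the leaderboard's
# details naming convention, not quoted from this record.
data = load_dataset(
    "open-llm-leaderboard/details_nisten__BigCodeLlama-92b",
    "harness_winogrande_5",
    split="train",
)
```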
[ "# Dataset Card for Evaluation run of nisten/BigCodeLlama-92b\n\n\n\nDataset automatically created during the evaluation run of model nisten/BigCodeLlama-92b on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T12:17:18.661697(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of nisten/BigCodeLlama-92b\n\n\n\nDataset automatically created during the evaluation run of model nisten/BigCodeLlama-92b on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T12:17:18.661697(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
b757b319611c2193cf444ac9c55980828fd3855b
# VegAnn Dataset 😄

## Dataset Description 📖

VegAnn, short for Vegetation Annotation, is a meticulously curated collection of 3,775 multi-crop RGB images aimed at enhancing research in crop vegetation segmentation. These images span various phenological stages and were captured using diverse systems and platforms under a wide range of illumination conditions. By aggregating sub-datasets from different projects and institutions, VegAnn represents a broad spectrum of measurement conditions, crop species, and development stages.

### Languages 🌐

The annotations and documentation are primarily in English.

## Dataset Structure 🏗

### Data Instances 📸

A VegAnn data instance consists of a 512x512 pixel RGB image patch derived from larger raw images. These patches are designed to provide sufficient detail for distinguishing between vegetation and background, crucial for applications in semantic segmentation and other forms of computer vision analysis in agricultural contexts.

![image/png](https://cdn-uploads.huggingface.co/production/uploads/645a05f09e55477fff862881/O-iKRqn8FRZnY9hBzmaU5.png)

### Data Fields 📋

- `Name`: Unique identifier for each image patch.
- `System`: The imaging system used to acquire the photo (e.g., Handheld Cameras, DHP, UAV).
- `Orientation`: The camera's orientation during image capture (e.g., Nadir, 45 degrees).
- `latitude` and `longitude`: Geographic coordinates where the image was taken.
- `date`: Date of image acquisition.
- `LocAcc`: Location accuracy flag (1 for high accuracy, 0 for low or uncertain accuracy).
- `Species`: The crop species featured in the image (e.g., Wheat, Maize, Soybean).
- `Owner`: The institution or entity that provided the image (e.g., Arvalis, INRAe).
- `Dataset-Name`: The sub-dataset or project from which the image originates (e.g., Phenomobile, Easypcc).
- `TVT-split1` to `TVT-split5`: Fields indicating the train/validation/test split configurations, facilitating various experimental setups.

### Data Splits 📊

The dataset is structured into multiple splits (as indicated by `TVT-split` fields) to support different training, validation, and testing scenarios in machine learning workflows.

## Dataset Creation 🛠

### Curation Rationale 🤔

The VegAnn dataset was developed to address the gap in available datasets for training convolutional neural networks (CNNs) for the task of semantic segmentation in real-world agricultural environments. By incorporating images from a wide array of conditions and stages of crop development, VegAnn aims to enhance the performance of segmentation algorithms, promote benchmarking, and foster research on large-scale crop vegetation segmentation.

### Source Data 🌱

#### Initial Data Collection and Normalization

Images within VegAnn were sourced from various sub-datasets contributed by different institutions, each under specific acquisition configurations. These were then standardized into 512x512 pixel patches to maintain consistency across the dataset.

#### Who are the source data providers?

The data was provided by a collaboration of institutions including Arvalis, INRAe, The University of Tokyo, University of Queensland, NEON, and EOLAB, among others.

![image/png](https://cdn-uploads.huggingface.co/production/uploads/645a05f09e55477fff862881/W7rF7P9oexd-Q7oBGV6aF.png)

### Annotations 📝

#### Annotation process

Annotations for the dataset were focused on distinguishing between vegetation and background within the images. The process ensured that the images offered sufficient spatial resolution to allow for accurate visual segmentation.

#### Who are the annotators?

The annotations were performed by a team comprising researchers and domain experts from the contributing institutions.

## Considerations for Using the Data 🤓

### Social Impact of Dataset 🌍

The VegAnn dataset is expected to significantly impact agricultural research and commercial applications by enhancing the accuracy of crop monitoring, disease detection, and yield estimation through improved vegetation segmentation techniques.

### Discussion of Biases 🧐

Given the diverse sources of the images, there may be inherent biases towards certain crop types, geographical locations, and imaging conditions. Users should consider this diversity in applications and analyses.

### Licensing Information 📄

Please refer to the specific licensing agreements of the contributing institutions or contact the dataset providers for more information on usage rights and restrictions.

## Citation Information 📚

If you use the VegAnn dataset in your research, please cite the following:

```
@article{madec_vegann_2023,
	title = {{VegAnn}, {Vegetation} {Annotation} of multi-crop {RGB} images acquired under diverse conditions for segmentation},
	volume = {10},
	issn = {2052-4463},
	url = {https://doi.org/10.1038/s41597-023-02098-y},
	doi = {10.1038/s41597-023-02098-y},
	abstract = {Applying deep learning to images of cropping systems provides new knowledge and insights in research and commercial applications. Semantic segmentation or pixel-wise classification, of RGB images acquired at the ground level, into vegetation and background is a critical step in the estimation of several canopy traits. Current state of the art methodologies based on convolutional neural networks (CNNs) are trained on datasets acquired under controlled or indoor environments. These models are unable to generalize to real-world images and hence need to be fine-tuned using new labelled datasets. This motivated the creation of the VegAnn - Vegetation Annotation - dataset, a collection of 3775 multi-crop RGB images acquired for different phenological stages using different systems and platforms in diverse illumination conditions. We anticipate that VegAnn will help improving segmentation algorithm performances, facilitate benchmarking and promote large-scale crop vegetation segmentation research.},
	number = {1},
	journal = {Scientific Data},
	author = {Madec, Simon and Irfan, Kamran and Velumani, Kaaviya and Baret, Frederic and David, Etienne and Daubige, Gaetan and Samatan, Lucas Bernigaud and Serouart, Mario and Smith, Daniel and James, Chrisbin and Camacho, Fernando and Guo, Wei and De Solan, Benoit and Chapman, Scott C. and Weiss, Marie},
	month = may,
	year = {2023},
	pages = {302},
}
```

## Additional Information

- **Dataset Curators**: Simon Madec et al.
- **Version**: 1.0
- **License**: Specified by each contributing institution
- **Contact**: TBD
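To make the field descriptions above concrete, a minimal usage sketch with the `datasets` library. The dataset id and column names come from this card and its metadata; the literal values stored in the `TVT-split*` columns are not documented here, so the sketch inspects them rather than assuming them:

```python
from datasets import load_dataset

# Load the single "train" split declared in the dataset metadata.
ds = load_dataset("LifeScienceModel/VegAnn", split="train")

# The five TVT-split columns encode predefined train/val/test partitions;
# inspect the actual label strings before filtering on them.
print(ds.unique("TVT-split1"))

sample = ds[0]
image, mask = sample["image"], sample["mask"]  # 512x512 RGB patch and its mask
print(sample["Species"], sample["System"], sample["Dataset-Name"])
```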
LifeScienceModel/VegAnn
[ "region:us" ]
2024-02-02T12:42:44+00:00
{"dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "mask", "dtype": "image"}, {"name": "System", "dtype": "string"}, {"name": "Orientation", "dtype": "string"}, {"name": "latitude", "dtype": "float64"}, {"name": "longitude", "dtype": "float64"}, {"name": "date", "dtype": "string"}, {"name": "LocAcc", "dtype": "int64"}, {"name": "Species", "dtype": "string"}, {"name": "Owner", "dtype": "string"}, {"name": "Dataset-Name", "dtype": "string"}, {"name": "TVT-split1", "dtype": "string"}, {"name": "TVT-split2", "dtype": "string"}, {"name": "TVT-split3", "dtype": "string"}, {"name": "TVT-split4", "dtype": "string"}, {"name": "TVT-split5", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 1896819757.9, "num_examples": 3775}], "download_size": 1940313757, "dataset_size": 1896819757.9}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2024-02-02T13:08:57+00:00
[]
[]
TAGS #region-us
# VegAnn Dataset ## Dataset Description VegAnn, short for Vegetation Annotation, is a meticulously curated collection of 3,775 multi-crop RGB images aimed at enhancing research in crop vegetation segmentation. These images span various phenological stages and were captured using diverse systems and platforms under a wide range of illumination conditions. By aggregating sub-datasets from different projects and institutions, VegAnn represents a broad spectrum of measurement conditions, crop species, and development stages. ### Languages The annotations and documentation are primarily in English. ## Dataset Structure ### Data Instances A VegAnn data instance consists of a 512x512 pixel RGB image patch derived from larger raw images. These patches are designed to provide sufficient detail for distinguishing between vegetation and background, crucial for applications in semantic segmentation and other forms of computer vision analysis in agricultural contexts. !image/png ### Data Fields - 'Name': Unique identifier for each image patch. - 'System': The imaging system used to acquire the photo (e.g., Handheld Cameras, DHP, UAV). - 'Orientation': The camera's orientation during image capture (e.g., Nadir, 45 degrees). - 'latitude' and 'longitude': Geographic coordinates where the image was taken. - 'date': Date of image acquisition. - 'LocAcc': Location accuracy flag (1 for high accuracy, 0 for low or uncertain accuracy). - 'Species': The crop species featured in the image (e.g., Wheat, Maize, Soybean). - 'Owner': The institution or entity that provided the image (e.g., Arvalis, INRAe). - 'Dataset-Name': The sub-dataset or project from which the image originates (e.g., Phenomobile, Easypcc). - 'TVT-split1' to 'TVT-split5': Fields indicating the train/validation/test split configurations, facilitating various experimental setups. ### Data Splits The dataset is structured into multiple splits (as indicated by 'TVT-split' fields) to support different training, validation, and testing scenarios in machine learning workflows. ## Dataset Creation ### Curation Rationale The VegAnn dataset was developed to address the gap in available datasets for training convolutional neural networks (CNNs) for the task of semantic segmentation in real-world agricultural environments. By incorporating images from a wide array of conditions and stages of crop development, VegAnn aims to enhance the performance of segmentation algorithms, promote benchmarking, and foster research on large-scale crop vegetation segmentation. ### Source Data #### Initial Data Collection and Normalization Images within VegAnn were sourced from various sub-datasets contributed by different institutions, each under specific acquisition configurations. These were then standardized into 512x512 pixel patches to maintain consistency across the dataset. #### Who are the source data providers? The data was provided by a collaboration of institutions including Arvalis, INRAe, The University of Tokyo, University of Queensland, NEON, and EOLAB, among others. !image/png ### Annotations #### Annotation process Annotations for the dataset were focused on distinguishing between vegetation and background within the images. The process ensured that the images offered sufficient spatial resolution to allow for accurate visual segmentation. #### Who are the annotators? The annotations were performed by a team comprising researchers and domain experts from the contributing institutions. 
## Considerations for Using the Data ### Social Impact of Dataset The VegAnn dataset is expected to significantly impact agricultural research and commercial applications by enhancing the accuracy of crop monitoring, disease detection, and yield estimation through improved vegetation segmentation techniques. ### Discussion of Biases Given the diverse sources of the images, there may be inherent biases towards certain crop types, geographical locations, and imaging conditions. Users should consider this diversity in applications and analyses. ### Licensing Information Please refer to the specific licensing agreements of the contributing institutions or contact the dataset providers for more information on usage rights and restrictions. If you use the VegAnn dataset in your research, please cite the following: ## Additional Information - Dataset Curators: Simon Madec et al. - Version: 1.0 - License: Specified by each contributing institution - Contact: TBD
[ "# VegAnn Dataset", "## Dataset Description \n\nVegAnn, short for Vegetation Annotation, is a meticulously curated collection of 3,775 multi-crop RGB images aimed at enhancing research in crop vegetation segmentation. These images span various phenological stages and were captured using diverse systems and platforms under a wide range of illumination conditions. By aggregating sub-datasets from different projects and institutions, VegAnn represents a broad spectrum of measurement conditions, crop species, and development stages.", "### Languages \n\nThe annotations and documentation are primarily in English.", "## Dataset Structure", "### Data Instances \n\nA VegAnn data instance consists of a 512x512 pixel RGB image patch derived from larger raw images. These patches are designed to provide sufficient detail for distinguishing between vegetation and background, crucial for applications in semantic segmentation and other forms of computer vision analysis in agricultural contexts.\n\n\n!image/png", "### Data Fields \n\n- 'Name': Unique identifier for each image patch.\n- 'System': The imaging system used to acquire the photo (e.g., Handheld Cameras, DHP, UAV).\n- 'Orientation': The camera's orientation during image capture (e.g., Nadir, 45 degrees).\n- 'latitude' and 'longitude': Geographic coordinates where the image was taken.\n- 'date': Date of image acquisition.\n- 'LocAcc': Location accuracy flag (1 for high accuracy, 0 for low or uncertain accuracy).\n- 'Species': The crop species featured in the image (e.g., Wheat, Maize, Soybean).\n- 'Owner': The institution or entity that provided the image (e.g., Arvalis, INRAe).\n- 'Dataset-Name': The sub-dataset or project from which the image originates (e.g., Phenomobile, Easypcc).\n- 'TVT-split1' to 'TVT-split5': Fields indicating the train/validation/test split configurations, facilitating various experimental setups.", "### Data Splits \n\nThe dataset is structured into multiple splits (as indicated by 'TVT-split' fields) to support different training, validation, and testing scenarios in machine learning workflows.", "## Dataset Creation", "### Curation Rationale \n\nThe VegAnn dataset was developed to address the gap in available datasets for training convolutional neural networks (CNNs) for the task of semantic segmentation in real-world agricultural environments. By incorporating images from a wide array of conditions and stages of crop development, VegAnn aims to enhance the performance of segmentation algorithms, promote benchmarking, and foster research on large-scale crop vegetation segmentation.", "### Source Data", "#### Initial Data Collection and Normalization\n\nImages within VegAnn were sourced from various sub-datasets contributed by different institutions, each under specific acquisition configurations. These were then standardized into 512x512 pixel patches to maintain consistency across the dataset.", "#### Who are the source data providers?\n\nThe data was provided by a collaboration of institutions including Arvalis, INRAe, The University of Tokyo, University of Queensland, NEON, and EOLAB, among others.\n\n\n!image/png", "### Annotations", "#### Annotation process\n\nAnnotations for the dataset were focused on distinguishing between vegetation and background within the images. 
The process ensured that the images offered sufficient spatial resolution to allow for accurate visual segmentation.", "#### Who are the annotators?\n\nThe annotations were performed by a team comprising researchers and domain experts from the contributing institutions.", "## Considerations for Using the Data", "### Social Impact of Dataset \n\nThe VegAnn dataset is expected to significantly impact agricultural research and commercial applications by enhancing the accuracy of crop monitoring, disease detection, and yield estimation through improved vegetation segmentation techniques.", "### Discussion of Biases \n\nGiven the diverse sources of the images, there may be inherent biases towards certain crop types, geographical locations, and imaging conditions. Users should consider this diversity in applications and analyses.", "### Licensing Information \n\nPlease refer to the specific licensing agreements of the contributing institutions or contact the dataset providers for more information on usage rights and restrictions.\n\n \n\nIf you use the VegAnn dataset in your research, please cite the following:", "## Additional Information\n\n- Dataset Curators: Simon Madec et al.\n- Version: 1.0\n- License: Specified by each contributing institution\n- Contact: TBD" ]
[ "TAGS\n#region-us \n", "# VegAnn Dataset", "## Dataset Description \n\nVegAnn, short for Vegetation Annotation, is a meticulously curated collection of 3,775 multi-crop RGB images aimed at enhancing research in crop vegetation segmentation. These images span various phenological stages and were captured using diverse systems and platforms under a wide range of illumination conditions. By aggregating sub-datasets from different projects and institutions, VegAnn represents a broad spectrum of measurement conditions, crop species, and development stages.", "### Languages \n\nThe annotations and documentation are primarily in English.", "## Dataset Structure", "### Data Instances \n\nA VegAnn data instance consists of a 512x512 pixel RGB image patch derived from larger raw images. These patches are designed to provide sufficient detail for distinguishing between vegetation and background, crucial for applications in semantic segmentation and other forms of computer vision analysis in agricultural contexts.\n\n\n!image/png", "### Data Fields \n\n- 'Name': Unique identifier for each image patch.\n- 'System': The imaging system used to acquire the photo (e.g., Handheld Cameras, DHP, UAV).\n- 'Orientation': The camera's orientation during image capture (e.g., Nadir, 45 degrees).\n- 'latitude' and 'longitude': Geographic coordinates where the image was taken.\n- 'date': Date of image acquisition.\n- 'LocAcc': Location accuracy flag (1 for high accuracy, 0 for low or uncertain accuracy).\n- 'Species': The crop species featured in the image (e.g., Wheat, Maize, Soybean).\n- 'Owner': The institution or entity that provided the image (e.g., Arvalis, INRAe).\n- 'Dataset-Name': The sub-dataset or project from which the image originates (e.g., Phenomobile, Easypcc).\n- 'TVT-split1' to 'TVT-split5': Fields indicating the train/validation/test split configurations, facilitating various experimental setups.", "### Data Splits \n\nThe dataset is structured into multiple splits (as indicated by 'TVT-split' fields) to support different training, validation, and testing scenarios in machine learning workflows.", "## Dataset Creation", "### Curation Rationale \n\nThe VegAnn dataset was developed to address the gap in available datasets for training convolutional neural networks (CNNs) for the task of semantic segmentation in real-world agricultural environments. By incorporating images from a wide array of conditions and stages of crop development, VegAnn aims to enhance the performance of segmentation algorithms, promote benchmarking, and foster research on large-scale crop vegetation segmentation.", "### Source Data", "#### Initial Data Collection and Normalization\n\nImages within VegAnn were sourced from various sub-datasets contributed by different institutions, each under specific acquisition configurations. These were then standardized into 512x512 pixel patches to maintain consistency across the dataset.", "#### Who are the source data providers?\n\nThe data was provided by a collaboration of institutions including Arvalis, INRAe, The University of Tokyo, University of Queensland, NEON, and EOLAB, among others.\n\n\n!image/png", "### Annotations", "#### Annotation process\n\nAnnotations for the dataset were focused on distinguishing between vegetation and background within the images. 
The process ensured that the images offered sufficient spatial resolution to allow for accurate visual segmentation.", "#### Who are the annotators?\n\nThe annotations were performed by a team comprising researchers and domain experts from the contributing institutions.", "## Considerations for Using the Data", "### Social Impact of Dataset \n\nThe VegAnn dataset is expected to significantly impact agricultural research and commercial applications by enhancing the accuracy of crop monitoring, disease detection, and yield estimation through improved vegetation segmentation techniques.", "### Discussion of Biases \n\nGiven the diverse sources of the images, there may be inherent biases towards certain crop types, geographical locations, and imaging conditions. Users should consider this diversity in applications and analyses.", "### Licensing Information \n\nPlease refer to the specific licensing agreements of the contributing institutions or contact the dataset providers for more information on usage rights and restrictions.\n\n \n\nIf you use the VegAnn dataset in your research, please cite the following:", "## Additional Information\n\n- Dataset Curators: Simon Madec et al.\n- Version: 1.0\n- License: Specified by each contributing institution\n- Contact: TBD" ]
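The VegAnn card above documents per-patch metadata and five precomputed train/validation/test assignments ('TVT-split1' through 'TVT-split5'). As a minimal sketch of how those columns could drive an experiment with the `datasets` library — assuming the dataset is published on the Hub (the repo id `simonMadec/VegAnn` used here is an assumption) and that the split columns carry labels like "Training", "Validation", and "Test" (also an assumption; the card does not state the values) — one might partition it like this:

```python
from datasets import load_dataset

# Hypothetical repo id: substitute the actual VegAnn repository on the Hub.
ds = load_dataset("simonMadec/VegAnn", split="train")

# Partition on one of the five precomputed assignments described in the card.
# The label values below are assumptions, not confirmed by the card;
# inspect ds.unique("TVT-split1") to see the real ones.
train = ds.filter(lambda ex: ex["TVT-split1"] == "Training")
val = ds.filter(lambda ex: ex["TVT-split1"] == "Validation")
test = ds.filter(lambda ex: ex["TVT-split1"] == "Test")
print(len(train), len(val), len(test))
```

Swapping 'TVT-split1' for any of the other four columns yields a different fold, which is how the dataset supports the multiple experimental setups the card mentions without redistributing the images.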
5152cbd897936b6ba8b666f1ffe6b78bbabf1131
# Dataset Card for Evaluation run of invalid-coder/SOLAR-10.7B-Instruct-SOLARC-M-10.7B-slerp <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [invalid-coder/SOLAR-10.7B-Instruct-SOLARC-M-10.7B-slerp](https://huggingface.co/invalid-coder/SOLAR-10.7B-Instruct-SOLARC-M-10.7B-slerp) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_invalid-coder__SOLAR-10.7B-Instruct-SOLARC-M-10.7B-slerp", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-02-02T12:41:00.359909](https://huggingface.co/datasets/open-llm-leaderboard/details_invalid-coder__SOLAR-10.7B-Instruct-SOLARC-M-10.7B-slerp/blob/main/results_2024-02-02T12-41-00.359909.json)(note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6666138651793191, "acc_stderr": 0.031637513501573004, "acc_norm": 0.6674256198470557, "acc_norm_stderr": 0.03228287649895947, "mc1": 0.5716034271725826, "mc1_stderr": 0.017323088597314747, "mc2": 0.7173010784992183, "mc2_stderr": 0.015025206960130984 }, "harness|arc:challenge|25": { "acc": 0.6825938566552902, "acc_stderr": 0.013602239088038167, "acc_norm": 0.7107508532423208, "acc_norm_stderr": 0.01325001257939344 }, "harness|hellaswag|10": { "acc": 0.7120095598486357, "acc_stderr": 0.004519011688417164, "acc_norm": 0.8833897629954193, "acc_norm_stderr": 0.0032029933469910634 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.43, "acc_stderr": 0.049756985195624284, "acc_norm": 0.43, "acc_norm_stderr": 0.049756985195624284 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6148148148148148, "acc_stderr": 0.04203921040156279, "acc_norm": 0.6148148148148148, "acc_norm_stderr": 0.04203921040156279 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.75, "acc_stderr": 0.03523807393012047, "acc_norm": 0.75, "acc_norm_stderr": 0.03523807393012047 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.74, "acc_stderr": 0.0440844002276808, "acc_norm": 0.74, "acc_norm_stderr": 0.0440844002276808 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6792452830188679, "acc_stderr": 0.02872750295788027, "acc_norm": 0.6792452830188679, "acc_norm_stderr": 0.02872750295788027 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7638888888888888, "acc_stderr": 0.03551446610810826, "acc_norm": 0.7638888888888888, "acc_norm_stderr": 0.03551446610810826 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.46, "acc_stderr": 0.05009082659620333, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620333 }, "harness|hendrycksTest-college_computer_science|5":
{ "acc": 0.52, "acc_stderr": 0.05021167315686779, "acc_norm": 0.52, "acc_norm_stderr": 0.05021167315686779 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6647398843930635, "acc_stderr": 0.03599586301247077, "acc_norm": 0.6647398843930635, "acc_norm_stderr": 0.03599586301247077 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.39215686274509803, "acc_stderr": 0.048580835742663454, "acc_norm": 0.39215686274509803, "acc_norm_stderr": 0.048580835742663454 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.76, "acc_stderr": 0.042923469599092816, "acc_norm": 0.76, "acc_norm_stderr": 0.042923469599092816 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.625531914893617, "acc_stderr": 0.03163910665367291, "acc_norm": 0.625531914893617, "acc_norm_stderr": 0.03163910665367291 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5, "acc_stderr": 0.047036043419179864, "acc_norm": 0.5, "acc_norm_stderr": 0.047036043419179864 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.6275862068965518, "acc_stderr": 0.04028731532947558, "acc_norm": 0.6275862068965518, "acc_norm_stderr": 0.04028731532947558 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.48148148148148145, "acc_stderr": 0.02573364199183898, "acc_norm": 0.48148148148148145, "acc_norm_stderr": 0.02573364199183898 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.4444444444444444, "acc_stderr": 0.044444444444444495, "acc_norm": 0.4444444444444444, "acc_norm_stderr": 0.044444444444444495 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.8225806451612904, "acc_stderr": 0.021732540689329286, "acc_norm": 0.8225806451612904, "acc_norm_stderr": 0.021732540689329286 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5123152709359606, "acc_stderr": 0.035169204442208966, "acc_norm": 0.5123152709359606, "acc_norm_stderr": 0.035169204442208966 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.72, "acc_stderr": 0.04512608598542128, "acc_norm": 0.72, "acc_norm_stderr": 0.04512608598542128 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.806060606060606, "acc_stderr": 0.03087414513656209, "acc_norm": 0.806060606060606, "acc_norm_stderr": 0.03087414513656209 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8686868686868687, "acc_stderr": 0.024063156416822516, "acc_norm": 0.8686868686868687, "acc_norm_stderr": 0.024063156416822516 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9067357512953368, "acc_stderr": 0.02098685459328973, "acc_norm": 0.9067357512953368, "acc_norm_stderr": 0.02098685459328973 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6615384615384615, "acc_stderr": 0.023991500500313036, "acc_norm": 0.6615384615384615, "acc_norm_stderr": 0.023991500500313036 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.37407407407407406, "acc_stderr": 0.029502861128955286, "acc_norm": 0.37407407407407406, "acc_norm_stderr": 0.029502861128955286 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.7100840336134454, "acc_stderr": 0.029472485833136088, "acc_norm": 0.7100840336134454, "acc_norm_stderr": 0.029472485833136088 }, 
"harness|hendrycksTest-high_school_physics|5": { "acc": 0.3708609271523179, "acc_stderr": 0.03943966699183629, "acc_norm": 0.3708609271523179, "acc_norm_stderr": 0.03943966699183629 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8477064220183487, "acc_stderr": 0.015405084393157074, "acc_norm": 0.8477064220183487, "acc_norm_stderr": 0.015405084393157074 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5648148148148148, "acc_stderr": 0.03381200005643527, "acc_norm": 0.5648148148148148, "acc_norm_stderr": 0.03381200005643527 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8578431372549019, "acc_stderr": 0.02450980392156862, "acc_norm": 0.8578431372549019, "acc_norm_stderr": 0.02450980392156862 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8481012658227848, "acc_stderr": 0.023363878096632446, "acc_norm": 0.8481012658227848, "acc_norm_stderr": 0.023363878096632446 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6771300448430493, "acc_stderr": 0.03138147637575499, "acc_norm": 0.6771300448430493, "acc_norm_stderr": 0.03138147637575499 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7480916030534351, "acc_stderr": 0.03807387116306086, "acc_norm": 0.7480916030534351, "acc_norm_stderr": 0.03807387116306086 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7768595041322314, "acc_stderr": 0.03800754475228733, "acc_norm": 0.7768595041322314, "acc_norm_stderr": 0.03800754475228733 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8055555555555556, "acc_stderr": 0.038260763248848646, "acc_norm": 0.8055555555555556, "acc_norm_stderr": 0.038260763248848646 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.754601226993865, "acc_stderr": 0.03380939813943354, "acc_norm": 0.754601226993865, "acc_norm_stderr": 0.03380939813943354 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4642857142857143, "acc_stderr": 0.04733667890053756, "acc_norm": 0.4642857142857143, "acc_norm_stderr": 0.04733667890053756 }, "harness|hendrycksTest-management|5": { "acc": 0.8543689320388349, "acc_stderr": 0.03492606476623791, "acc_norm": 0.8543689320388349, "acc_norm_stderr": 0.03492606476623791 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8547008547008547, "acc_stderr": 0.0230866350868414, "acc_norm": 0.8547008547008547, "acc_norm_stderr": 0.0230866350868414 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8071519795657727, "acc_stderr": 0.014108533515757431, "acc_norm": 0.8071519795657727, "acc_norm_stderr": 0.014108533515757431 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7601156069364162, "acc_stderr": 0.022989592543123567, "acc_norm": 0.7601156069364162, "acc_norm_stderr": 0.022989592543123567 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.38994413407821227, "acc_stderr": 0.01631237662921307, "acc_norm": 0.38994413407821227, "acc_norm_stderr": 0.01631237662921307 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7549019607843137, "acc_stderr": 0.02463004897982478, "acc_norm": 0.7549019607843137, "acc_norm_stderr": 0.02463004897982478 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7331189710610932, "acc_stderr": 0.025122637608816643, "acc_norm": 0.7331189710610932, "acc_norm_stderr": 0.025122637608816643 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7870370370370371, "acc_stderr": 0.0227797190887334, "acc_norm": 
0.7870370370370371, "acc_norm_stderr": 0.0227797190887334 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.49645390070921985, "acc_stderr": 0.02982674915328092, "acc_norm": 0.49645390070921985, "acc_norm_stderr": 0.02982674915328092 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.49282920469361147, "acc_stderr": 0.012768922739553308, "acc_norm": 0.49282920469361147, "acc_norm_stderr": 0.012768922739553308 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.7389705882352942, "acc_stderr": 0.026679252270103128, "acc_norm": 0.7389705882352942, "acc_norm_stderr": 0.026679252270103128 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6813725490196079, "acc_stderr": 0.018850084696468712, "acc_norm": 0.6813725490196079, "acc_norm_stderr": 0.018850084696468712 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6818181818181818, "acc_stderr": 0.04461272175910509, "acc_norm": 0.6818181818181818, "acc_norm_stderr": 0.04461272175910509 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7306122448979592, "acc_stderr": 0.02840125202902294, "acc_norm": 0.7306122448979592, "acc_norm_stderr": 0.02840125202902294 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8407960199004975, "acc_stderr": 0.02587064676616913, "acc_norm": 0.8407960199004975, "acc_norm_stderr": 0.02587064676616913 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.9, "acc_stderr": 0.030151134457776334, "acc_norm": 0.9, "acc_norm_stderr": 0.030151134457776334 }, "harness|hendrycksTest-virology|5": { "acc": 0.5843373493975904, "acc_stderr": 0.03836722176598052, "acc_norm": 0.5843373493975904, "acc_norm_stderr": 0.03836722176598052 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.783625730994152, "acc_stderr": 0.03158149539338733, "acc_norm": 0.783625730994152, "acc_norm_stderr": 0.03158149539338733 }, "harness|truthfulqa:mc|0": { "mc1": 0.5716034271725826, "mc1_stderr": 0.017323088597314747, "mc2": 0.7173010784992183, "mc2_stderr": 0.015025206960130984 }, "harness|winogrande|5": { "acc": 0.8374112075769534, "acc_stderr": 0.010370455551343331 }, "harness|gsm8k|5": { "acc": 0.6474601971190296, "acc_stderr": 0.013159909755930333 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. 
--> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
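Beyond the single per-task example in the card above, the aggregated "results" configuration it mentions can be loaded the same way. A minimal sketch, assuming the returned records mirror the "Latest results" JSON shown earlier (the exact field layout is not guaranteed by the card):

```python
from datasets import load_dataset

REPO = "open-llm-leaderboard/details_invalid-coder__SOLAR-10.7B-Instruct-SOLARC-M-10.7B-slerp"

# Aggregated metrics: the "results" configuration; per the card, the "train"
# split always points at the latest run.
results = load_dataset(REPO, "results", split="train")
print(results[0])

# Per-task details live in the timestamped configs; "latest" aliases the
# newest run, as listed in the metadata below.
gsm8k = load_dataset(REPO, "harness_gsm8k_5", split="latest")
print(gsm8k[0].keys())
```

The config name `harness_gsm8k_5` and the `latest` split come straight from the metadata block that follows; any other config listed there can be substituted.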
open-llm-leaderboard/details_invalid-coder__SOLAR-10.7B-Instruct-SOLARC-M-10.7B-slerp
[ "region:us" ]
2024-02-02T12:43:18+00:00
{"pretty_name": "Evaluation run of invalid-coder/SOLAR-10.7B-Instruct-SOLARC-M-10.7B-slerp", "dataset_summary": "Dataset automatically created during the evaluation run of model [invalid-coder/SOLAR-10.7B-Instruct-SOLARC-M-10.7B-slerp](https://huggingface.co/invalid-coder/SOLAR-10.7B-Instruct-SOLARC-M-10.7B-slerp) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_invalid-coder__SOLAR-10.7B-Instruct-SOLARC-M-10.7B-slerp\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-02T12:41:00.359909](https://huggingface.co/datasets/open-llm-leaderboard/details_invalid-coder__SOLAR-10.7B-Instruct-SOLARC-M-10.7B-slerp/blob/main/results_2024-02-02T12-41-00.359909.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6666138651793191,\n \"acc_stderr\": 0.031637513501573004,\n \"acc_norm\": 0.6674256198470557,\n \"acc_norm_stderr\": 0.03228287649895947,\n \"mc1\": 0.5716034271725826,\n \"mc1_stderr\": 0.017323088597314747,\n \"mc2\": 0.7173010784992183,\n \"mc2_stderr\": 0.015025206960130984\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6825938566552902,\n \"acc_stderr\": 0.013602239088038167,\n \"acc_norm\": 0.7107508532423208,\n \"acc_norm_stderr\": 0.01325001257939344\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.7120095598486357,\n \"acc_stderr\": 0.004519011688417164,\n \"acc_norm\": 0.8833897629954193,\n \"acc_norm_stderr\": 0.0032029933469910634\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.43,\n \"acc_stderr\": 0.049756985195624284,\n \"acc_norm\": 0.43,\n \"acc_norm_stderr\": 0.049756985195624284\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6148148148148148,\n \"acc_stderr\": 0.04203921040156279,\n \"acc_norm\": 0.6148148148148148,\n \"acc_norm_stderr\": 0.04203921040156279\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.03523807393012047,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.03523807393012047\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.74,\n \"acc_stderr\": 0.0440844002276808,\n \"acc_norm\": 0.74,\n \"acc_norm_stderr\": 0.0440844002276808\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6792452830188679,\n \"acc_stderr\": 0.02872750295788027,\n \"acc_norm\": 0.6792452830188679,\n \"acc_norm_stderr\": 0.02872750295788027\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7638888888888888,\n \"acc_stderr\": 0.03551446610810826,\n \"acc_norm\": 0.7638888888888888,\n \"acc_norm_stderr\": 
0.03551446610810826\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.46,\n \"acc_stderr\": 0.05009082659620333,\n \"acc_norm\": 0.46,\n \"acc_norm_stderr\": 0.05009082659620333\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.52,\n \"acc_stderr\": 0.05021167315686779,\n \"acc_norm\": 0.52,\n \"acc_norm_stderr\": 0.05021167315686779\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.046882617226215034,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.046882617226215034\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6647398843930635,\n \"acc_stderr\": 0.03599586301247077,\n \"acc_norm\": 0.6647398843930635,\n \"acc_norm_stderr\": 0.03599586301247077\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.39215686274509803,\n \"acc_stderr\": 0.048580835742663454,\n \"acc_norm\": 0.39215686274509803,\n \"acc_norm_stderr\": 0.048580835742663454\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.76,\n \"acc_stderr\": 0.042923469599092816,\n \"acc_norm\": 0.76,\n \"acc_norm_stderr\": 0.042923469599092816\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.625531914893617,\n \"acc_stderr\": 0.03163910665367291,\n \"acc_norm\": 0.625531914893617,\n \"acc_norm_stderr\": 0.03163910665367291\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.047036043419179864,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.047036043419179864\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.6275862068965518,\n \"acc_stderr\": 0.04028731532947558,\n \"acc_norm\": 0.6275862068965518,\n \"acc_norm_stderr\": 0.04028731532947558\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.48148148148148145,\n \"acc_stderr\": 0.02573364199183898,\n \"acc_norm\": 0.48148148148148145,\n \"acc_norm_stderr\": 0.02573364199183898\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4444444444444444,\n \"acc_stderr\": 0.044444444444444495,\n \"acc_norm\": 0.4444444444444444,\n \"acc_norm_stderr\": 0.044444444444444495\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.8225806451612904,\n \"acc_stderr\": 0.021732540689329286,\n \"acc_norm\": 0.8225806451612904,\n \"acc_norm_stderr\": 0.021732540689329286\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5123152709359606,\n \"acc_stderr\": 0.035169204442208966,\n \"acc_norm\": 0.5123152709359606,\n \"acc_norm_stderr\": 0.035169204442208966\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.04512608598542128,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.04512608598542128\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.806060606060606,\n \"acc_stderr\": 0.03087414513656209,\n \"acc_norm\": 0.806060606060606,\n \"acc_norm_stderr\": 0.03087414513656209\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8686868686868687,\n \"acc_stderr\": 0.024063156416822516,\n \"acc_norm\": 0.8686868686868687,\n \"acc_norm_stderr\": 0.024063156416822516\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9067357512953368,\n \"acc_stderr\": 0.02098685459328973,\n \"acc_norm\": 0.9067357512953368,\n 
\"acc_norm_stderr\": 0.02098685459328973\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6615384615384615,\n \"acc_stderr\": 0.023991500500313036,\n \"acc_norm\": 0.6615384615384615,\n \"acc_norm_stderr\": 0.023991500500313036\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.37407407407407406,\n \"acc_stderr\": 0.029502861128955286,\n \"acc_norm\": 0.37407407407407406,\n \"acc_norm_stderr\": 0.029502861128955286\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.7100840336134454,\n \"acc_stderr\": 0.029472485833136088,\n \"acc_norm\": 0.7100840336134454,\n \"acc_norm_stderr\": 0.029472485833136088\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3708609271523179,\n \"acc_stderr\": 0.03943966699183629,\n \"acc_norm\": 0.3708609271523179,\n \"acc_norm_stderr\": 0.03943966699183629\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8477064220183487,\n \"acc_stderr\": 0.015405084393157074,\n \"acc_norm\": 0.8477064220183487,\n \"acc_norm_stderr\": 0.015405084393157074\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5648148148148148,\n \"acc_stderr\": 0.03381200005643527,\n \"acc_norm\": 0.5648148148148148,\n \"acc_norm_stderr\": 0.03381200005643527\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8578431372549019,\n \"acc_stderr\": 0.02450980392156862,\n \"acc_norm\": 0.8578431372549019,\n \"acc_norm_stderr\": 0.02450980392156862\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8481012658227848,\n \"acc_stderr\": 0.023363878096632446,\n \"acc_norm\": 0.8481012658227848,\n \"acc_norm_stderr\": 0.023363878096632446\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6771300448430493,\n \"acc_stderr\": 0.03138147637575499,\n \"acc_norm\": 0.6771300448430493,\n \"acc_norm_stderr\": 0.03138147637575499\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7480916030534351,\n \"acc_stderr\": 0.03807387116306086,\n \"acc_norm\": 0.7480916030534351,\n \"acc_norm_stderr\": 0.03807387116306086\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7768595041322314,\n \"acc_stderr\": 0.03800754475228733,\n \"acc_norm\": 0.7768595041322314,\n \"acc_norm_stderr\": 0.03800754475228733\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8055555555555556,\n \"acc_stderr\": 0.038260763248848646,\n \"acc_norm\": 0.8055555555555556,\n \"acc_norm_stderr\": 0.038260763248848646\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.754601226993865,\n \"acc_stderr\": 0.03380939813943354,\n \"acc_norm\": 0.754601226993865,\n \"acc_norm_stderr\": 0.03380939813943354\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4642857142857143,\n \"acc_stderr\": 0.04733667890053756,\n \"acc_norm\": 0.4642857142857143,\n \"acc_norm_stderr\": 0.04733667890053756\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8543689320388349,\n \"acc_stderr\": 0.03492606476623791,\n \"acc_norm\": 0.8543689320388349,\n \"acc_norm_stderr\": 0.03492606476623791\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8547008547008547,\n \"acc_stderr\": 0.0230866350868414,\n \"acc_norm\": 0.8547008547008547,\n \"acc_norm_stderr\": 0.0230866350868414\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n 
\"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8071519795657727,\n \"acc_stderr\": 0.014108533515757431,\n \"acc_norm\": 0.8071519795657727,\n \"acc_norm_stderr\": 0.014108533515757431\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7601156069364162,\n \"acc_stderr\": 0.022989592543123567,\n \"acc_norm\": 0.7601156069364162,\n \"acc_norm_stderr\": 0.022989592543123567\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.38994413407821227,\n \"acc_stderr\": 0.01631237662921307,\n \"acc_norm\": 0.38994413407821227,\n \"acc_norm_stderr\": 0.01631237662921307\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7549019607843137,\n \"acc_stderr\": 0.02463004897982478,\n \"acc_norm\": 0.7549019607843137,\n \"acc_norm_stderr\": 0.02463004897982478\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7331189710610932,\n \"acc_stderr\": 0.025122637608816643,\n \"acc_norm\": 0.7331189710610932,\n \"acc_norm_stderr\": 0.025122637608816643\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7870370370370371,\n \"acc_stderr\": 0.0227797190887334,\n \"acc_norm\": 0.7870370370370371,\n \"acc_norm_stderr\": 0.0227797190887334\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.49645390070921985,\n \"acc_stderr\": 0.02982674915328092,\n \"acc_norm\": 0.49645390070921985,\n \"acc_norm_stderr\": 0.02982674915328092\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.49282920469361147,\n \"acc_stderr\": 0.012768922739553308,\n \"acc_norm\": 0.49282920469361147,\n \"acc_norm_stderr\": 0.012768922739553308\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.7389705882352942,\n \"acc_stderr\": 0.026679252270103128,\n \"acc_norm\": 0.7389705882352942,\n \"acc_norm_stderr\": 0.026679252270103128\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6813725490196079,\n \"acc_stderr\": 0.018850084696468712,\n \"acc_norm\": 0.6813725490196079,\n \"acc_norm_stderr\": 0.018850084696468712\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6818181818181818,\n \"acc_stderr\": 0.04461272175910509,\n \"acc_norm\": 0.6818181818181818,\n \"acc_norm_stderr\": 0.04461272175910509\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7306122448979592,\n \"acc_stderr\": 0.02840125202902294,\n \"acc_norm\": 0.7306122448979592,\n \"acc_norm_stderr\": 0.02840125202902294\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8407960199004975,\n \"acc_stderr\": 0.02587064676616913,\n \"acc_norm\": 0.8407960199004975,\n \"acc_norm_stderr\": 0.02587064676616913\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.9,\n \"acc_stderr\": 0.030151134457776334,\n \"acc_norm\": 0.9,\n \"acc_norm_stderr\": 0.030151134457776334\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5843373493975904,\n \"acc_stderr\": 0.03836722176598052,\n \"acc_norm\": 0.5843373493975904,\n \"acc_norm_stderr\": 0.03836722176598052\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.783625730994152,\n \"acc_stderr\": 0.03158149539338733,\n \"acc_norm\": 0.783625730994152,\n \"acc_norm_stderr\": 0.03158149539338733\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5716034271725826,\n \"mc1_stderr\": 0.017323088597314747,\n \"mc2\": 0.7173010784992183,\n \"mc2_stderr\": 0.015025206960130984\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8374112075769534,\n \"acc_stderr\": 0.010370455551343331\n },\n \"harness|gsm8k|5\": {\n \"acc\": 
0.6474601971190296,\n \"acc_stderr\": 0.013159909755930333\n }\n}\n```", "repo_url": "https://huggingface.co/invalid-coder/SOLAR-10.7B-Instruct-SOLARC-M-10.7B-slerp", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_02T12_41_00.359909", "path": ["**/details_harness|arc:challenge|25_2024-02-02T12-41-00.359909.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-02T12-41-00.359909.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_02T12_41_00.359909", "path": ["**/details_harness|gsm8k|5_2024-02-02T12-41-00.359909.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-02T12-41-00.359909.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_02T12_41_00.359909", "path": ["**/details_harness|hellaswag|10_2024-02-02T12-41-00.359909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-02T12-41-00.359909.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_02T12_41_00.359909", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T12-41-00.359909.parquet", 
"**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T12-41-00.359909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T12-41-00.359909.parquet", 
"**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T12-41-00.359909.parquet", 
"**/details_harness|hendrycksTest-nutrition|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T12-41-00.359909.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T12-41-00.359909.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_02T12_41_00.359909", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T12-41-00.359909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T12-41-00.359909.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_02T12_41_00.359909", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T12-41-00.359909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T12-41-00.359909.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_02T12_41_00.359909", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T12-41-00.359909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T12-41-00.359909.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_02T12_41_00.359909", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T12-41-00.359909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T12-41-00.359909.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_02T12_41_00.359909", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T12-41-00.359909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T12-41-00.359909.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_02T12_41_00.359909", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T12-41-00.359909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T12-41-00.359909.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_02T12_41_00.359909", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T12-41-00.359909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T12-41-00.359909.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", 
"data_files": [{"split": "2024_02_02T12_41_00.359909", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T12-41-00.359909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T12-41-00.359909.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_02T12_41_00.359909", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T12-41-00.359909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T12-41-00.359909.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_02T12_41_00.359909", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T12-41-00.359909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T12-41-00.359909.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_02T12_41_00.359909", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T12-41-00.359909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T12-41-00.359909.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_02T12_41_00.359909", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T12-41-00.359909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T12-41-00.359909.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_02T12_41_00.359909", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T12-41-00.359909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T12-41-00.359909.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_02T12_41_00.359909", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T12-41-00.359909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T12-41-00.359909.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_02T12_41_00.359909", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T12-41-00.359909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T12-41-00.359909.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_02T12_41_00.359909", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T12-41-00.359909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T12-41-00.359909.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_02T12_41_00.359909", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T12-41-00.359909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T12-41-00.359909.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_02T12_41_00.359909", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T12-41-00.359909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T12-41-00.359909.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_02T12_41_00.359909", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T12-41-00.359909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T12-41-00.359909.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_02T12_41_00.359909", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T12-41-00.359909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T12-41-00.359909.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_02T12_41_00.359909", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T12-41-00.359909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T12-41-00.359909.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_02T12_41_00.359909", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T12-41-00.359909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T12-41-00.359909.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_02T12_41_00.359909", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T12-41-00.359909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T12-41-00.359909.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_02T12_41_00.359909", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T12-41-00.359909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T12-41-00.359909.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_02T12_41_00.359909", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T12-41-00.359909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T12-41-00.359909.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_02T12_41_00.359909", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T12-41-00.359909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T12-41-00.359909.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_02T12_41_00.359909", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T12-41-00.359909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T12-41-00.359909.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_02T12_41_00.359909", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T12-41-00.359909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T12-41-00.359909.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_02T12_41_00.359909", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T12-41-00.359909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T12-41-00.359909.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_02T12_41_00.359909", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T12-41-00.359909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T12-41-00.359909.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_02T12_41_00.359909", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T12-41-00.359909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T12-41-00.359909.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_02T12_41_00.359909", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T12-41-00.359909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T12-41-00.359909.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_02T12_41_00.359909", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T12-41-00.359909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T12-41-00.359909.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_02T12_41_00.359909", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T12-41-00.359909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T12-41-00.359909.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_02T12_41_00.359909", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T12-41-00.359909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T12-41-00.359909.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_02T12_41_00.359909", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T12-41-00.359909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T12-41-00.359909.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_02T12_41_00.359909", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T12-41-00.359909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T12-41-00.359909.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": 
"2024_02_02T12_41_00.359909", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T12-41-00.359909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T12-41-00.359909.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_02T12_41_00.359909", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T12-41-00.359909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T12-41-00.359909.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_02T12_41_00.359909", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T12-41-00.359909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T12-41-00.359909.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_02T12_41_00.359909", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T12-41-00.359909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T12-41-00.359909.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_02T12_41_00.359909", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T12-41-00.359909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T12-41-00.359909.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_02T12_41_00.359909", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T12-41-00.359909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T12-41-00.359909.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_02T12_41_00.359909", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T12-41-00.359909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T12-41-00.359909.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_02T12_41_00.359909", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T12-41-00.359909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T12-41-00.359909.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_02T12_41_00.359909", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T12-41-00.359909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T12-41-00.359909.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_02T12_41_00.359909", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T12-41-00.359909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T12-41-00.359909.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_02T12_41_00.359909", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T12-41-00.359909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T12-41-00.359909.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_02T12_41_00.359909", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T12-41-00.359909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T12-41-00.359909.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_02T12_41_00.359909", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T12-41-00.359909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T12-41-00.359909.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_02T12_41_00.359909", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T12-41-00.359909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T12-41-00.359909.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_02T12_41_00.359909", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T12-41-00.359909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T12-41-00.359909.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_02T12_41_00.359909", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T12-41-00.359909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T12-41-00.359909.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_02T12_41_00.359909", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T12-41-00.359909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T12-41-00.359909.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_02T12_41_00.359909", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T12-41-00.359909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T12-41-00.359909.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_02T12_41_00.359909", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T12-41-00.359909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T12-41-00.359909.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_02T12_41_00.359909", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T12-41-00.359909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T12-41-00.359909.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_02T12_41_00.359909", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T12-41-00.359909.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T12-41-00.359909.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_02T12_41_00.359909", "path": ["**/details_harness|winogrande|5_2024-02-02T12-41-00.359909.parquet"]}, {"split": "latest", "path": 
["**/details_harness|winogrande|5_2024-02-02T12-41-00.359909.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_02_02T12_41_00.359909", "path": ["results_2024-02-02T12-41-00.359909.parquet"]}, {"split": "latest", "path": ["results_2024-02-02T12-41-00.359909.parquet"]}]}]}
2024-02-02T12:43:44+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of invalid-coder/SOLAR-10.7B-Instruct-SOLARC-M-10.7B-slerp Dataset automatically created during the evaluation run of model invalid-coder/SOLAR-10.7B-Instruct-SOLARC-M-10.7B-slerp on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-02-02T12:41:00.359909 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
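For example, a minimal loading sketch for this run's details: the repository name below is inferred from the leaderboard's usual `details_<org>__<model>` naming convention, and any of the 63 configuration names listed in the metadata can be substituted for `harness_winogrande_5`.

```python
from datasets import load_dataset

# Repository name inferred from the Open LLM Leaderboard's
# "details_<org>__<model>" naming convention for details datasets.
data = load_dataset(
    "open-llm-leaderboard/details_invalid-coder__SOLAR-10.7B-Instruct-SOLARC-M-10.7B-slerp",
    "harness_winogrande_5",
    split="train",
)
```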
[ "# Dataset Card for Evaluation run of invalid-coder/SOLAR-10.7B-Instruct-SOLARC-M-10.7B-slerp\n\n\n\nDataset automatically created during the evaluation run of model invalid-coder/SOLAR-10.7B-Instruct-SOLARC-M-10.7B-slerp on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T12:41:00.359909(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of invalid-coder/SOLAR-10.7B-Instruct-SOLARC-M-10.7B-slerp\n\n\n\nDataset automatically created during the evaluation run of model invalid-coder/SOLAR-10.7B-Instruct-SOLARC-M-10.7B-slerp on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T12:41:00.359909(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
75e09b2de6b30a833fda7295c78263925c5d6f26
# Daumier Caricatures Dataset

Welcome to the Daumier Caricatures Dataset, a distinctive collection of 67 public domain lithographic caricatures by the celebrated artist Honoré Daumier. This dataset, featuring 'big head' caricatures, provides a valuable asset for training AI in the recognition and generation of satirical artwork.

[![Discord](https://img.shields.io/discord/1091306623819059300?color=7289da&label=Discord&logo=discord&logoColor=fff&style=for-the-badge)](https://discord.com/invite/m3TBB9XEkb)

## Dataset Overview

- **Content**: The dataset comprises 67 lithographs, showcasing Honoré Daumier's renowned caricatures that offer a humorous and critical perspective of 19th-century French society.
- **Source**: Each image is a public domain work sourced from the National Gallery of Art, ensuring unrestricted access and use for educational and research purposes.
- **Usage**: Primarily designed for AI models specializing in art history, pattern recognition in caricatures, and satirical art generation.

## Licensing

- The original lithographs by Honoré Daumier are in the public domain, provided by the National Gallery of Art. However, this curated dataset, which includes the lithographs and GPT-Vision generated captions, is available under the Creative Commons Attribution-NonCommercial 2.0 Generic (CC BY-NC 2.0) license. This license permits non-commercial use, provided proper attribution is given, and the use is not for commercial purposes.
- To learn more about this license, please visit [CC BY-NC 2.0 License details](https://creativecommons.org/licenses/by-nc/2.0/).

## Dataset Composition

Each caricature in the dataset is complemented by a descriptive caption generated by GPT-Vision. These captions are tailored to enhance the AI's understanding of the artworks during training, with a focus on the distinctive elements that characterize Daumier's style.

## How to Use the Dataset

1. **Download the Dataset**: Access the dataset through the provided link for use in AI model training in non-commercial settings.
2. **Examine the Caricatures and Captions**: Review the images and their captions to gain insight into the nuances of Daumier's satirical art.
3. **Train Your AI Model**: Apply the dataset to train AI models capable of recognizing and emulating the unique characteristics of lithographic caricature art.

## Contributions and Feedback

Your insights and additions are invaluable to us. We encourage feedback and contributions, such as providing additional images or improved captions, to enrich the dataset further. Your input will facilitate ongoing improvements for the AI and art history communities.

---

The Daumier Caricatures Dataset serves as an essential tool for those looking to delve into the intersection of AI and art history, providing a foundation for understanding and creating satirical art in the vein of Honoré Daumier.
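As a minimal sketch, the full dataset (lithographs plus captions) can be fetched with `huggingface_hub`; the repository id is taken from this page, but the exact file layout inside the repository may differ.

```python
from huggingface_hub import snapshot_download

# Download all files of the dataset repository for local,
# non-commercial use (images and GPT-Vision captions).
local_dir = snapshot_download(
    repo_id="Blib-la/honore_daumier_dataset",
    repo_type="dataset",
)
print(f"Dataset downloaded to: {local_dir}")
```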
Blib-la/honore_daumier_dataset
[ "license:cc-by-nc-2.0", "region:us" ]
2024-02-02T12:45:31+00:00
{"license": "cc-by-nc-2.0", "viewer": false}
2024-02-02T14:25:56+00:00
[]
[]
TAGS #license-cc-by-nc-2.0 #region-us
# Daumier Caricatures Dataset Welcome to the Daumier Caricatures Dataset, a distinctive collection of 67 public domain lithographic caricatures by the celebrated artist Honoré Daumier. This dataset, featuring 'big head' caricatures, provides a valuable asset for training AI in the recognition and generation of satirical artwork. ![Discord](URL ## Dataset Overview - Content: The dataset comprises 67 lithographs, showcasing Honoré Daumier's renowned caricatures that offer a humorous and critical perspective of 19th-century French society. - Source: Each image is a public domain work sourced from the National Gallery of Art, ensuring unrestricted access and use for educational and research purposes. - Usage: Primarily designed for AI models specializing in art history, pattern recognition in caricatures, and satirical art generation. ## Licensing - The original lithographs by Honoré Daumier are in the public domain, provided by the National Gallery of Art. However, this curated dataset, which includes the lithographs and GPT-Vision generated captions, is available under the Creative Commons Attribution-NonCommercial 2.0 Generic (CC BY-NC 2.0) license. This license permits non-commercial use, provided proper attribution is given, and the use is not for commercial purposes. - To learn more about this license, please visit CC BY-NC 2.0 License details. ## Dataset Composition Each caricature in the dataset is complemented by a descriptive caption generated by GPT-Vision. These captions are tailored to enhance the AI's understanding of the artworks during training, with a focus on the distinctive elements that characterize Daumier's style. ## How to Use the Dataset 1. Download the Dataset: Access the dataset through the provided link for use in AI model training in non-commercial settings. 2. Examine the Caricatures and Captions: Review the images and their captions to gain insight into the nuances of Daumier's satirical art. 3. Train Your AI Model: Apply the dataset to train AI models capable of recognizing and emulating the unique characteristics of lithographic caricature art. ## Contributions and Feedback Your insights and additions are invaluable to us. We encourage feedback and contributions, such as providing additional images or improved captions, to enrich the dataset further. Your input will facilitate ongoing improvements for the AI and art history communities. --- The Daumier Caricatures Dataset serves as an essential tool for those looking to delve into the intersection of AI and art history, providing a foundation for understanding and creating satirical art in the vein of Honoré Daumier.
[ "# Daumier Caricatures Dataset\n\nWelcome to the Daumier Caricatures Dataset, a distinctive collection of 67 public domain lithographic caricatures by the celebrated artist Honoré Daumier. This dataset, featuring 'big head' caricatures, provides a valuable asset for training AI in the recognition and generation of satirical artwork.\n\n![Discord](URL", "## Dataset Overview\n\n- Content: The dataset comprises 67 lithographs, showcasing Honoré Daumier's renowned caricatures that offer a humorous and critical perspective of 19th-century French society.\n- Source: Each image is a public domain work sourced from the National Gallery of Art, ensuring unrestricted access and use for educational and research purposes.\n- Usage: Primarily designed for AI models specializing in art history, pattern recognition in caricatures, and satirical art generation.", "## Licensing\n\n- The original lithographs by Honoré Daumier are in the public domain, provided by the National Gallery of Art. However, this curated dataset, which includes the lithographs and GPT-Vision generated captions, is available under the Creative Commons Attribution-NonCommercial 2.0 Generic (CC BY-NC 2.0) license. This license permits non-commercial use, provided proper attribution is given, and the use is not for commercial purposes.\n- To learn more about this license, please visit CC BY-NC 2.0 License details.", "## Dataset Composition\n\nEach caricature in the dataset is complemented by a descriptive caption generated by GPT-Vision. These captions are tailored to enhance the AI's understanding of the artworks during training, with a focus on the distinctive elements that characterize Daumier's style.", "## How to Use the Dataset\n\n1. Download the Dataset: Access the dataset through the provided link for use in AI model training in non-commercial settings.\n2. Examine the Caricatures and Captions: Review the images and their captions to gain insight into the nuances of Daumier's satirical art.\n3. Train Your AI Model: Apply the dataset to train AI models capable of recognizing and emulating the unique characteristics of lithographic caricature art.", "## Contributions and Feedback\n\nYour insights and additions are invaluable to us. We encourage feedback and contributions, such as providing additional images or improved captions, to enrich the dataset further. Your input will facilitate ongoing improvements for the AI and art history communities.\n\n---\n\nThe Daumier Caricatures Dataset serves as an essential tool for those looking to delve into the intersection of AI and art history, providing a foundation for understanding and creating satirical art in the vein of Honoré Daumier." ]
[ "TAGS\n#license-cc-by-nc-2.0 #region-us \n", "# Daumier Caricatures Dataset\n\nWelcome to the Daumier Caricatures Dataset, a distinctive collection of 67 public domain lithographic caricatures by the celebrated artist Honoré Daumier. This dataset, featuring 'big head' caricatures, provides a valuable asset for training AI in the recognition and generation of satirical artwork.\n\n![Discord](URL", "## Dataset Overview\n\n- Content: The dataset comprises 67 lithographs, showcasing Honoré Daumier's renowned caricatures that offer a humorous and critical perspective of 19th-century French society.\n- Source: Each image is a public domain work sourced from the National Gallery of Art, ensuring unrestricted access and use for educational and research purposes.\n- Usage: Primarily designed for AI models specializing in art history, pattern recognition in caricatures, and satirical art generation.", "## Licensing\n\n- The original lithographs by Honoré Daumier are in the public domain, provided by the National Gallery of Art. However, this curated dataset, which includes the lithographs and GPT-Vision generated captions, is available under the Creative Commons Attribution-NonCommercial 2.0 Generic (CC BY-NC 2.0) license. This license permits non-commercial use, provided proper attribution is given, and the use is not for commercial purposes.\n- To learn more about this license, please visit CC BY-NC 2.0 License details.", "## Dataset Composition\n\nEach caricature in the dataset is complemented by a descriptive caption generated by GPT-Vision. These captions are tailored to enhance the AI's understanding of the artworks during training, with a focus on the distinctive elements that characterize Daumier's style.", "## How to Use the Dataset\n\n1. Download the Dataset: Access the dataset through the provided link for use in AI model training in non-commercial settings.\n2. Examine the Caricatures and Captions: Review the images and their captions to gain insight into the nuances of Daumier's satirical art.\n3. Train Your AI Model: Apply the dataset to train AI models capable of recognizing and emulating the unique characteristics of lithographic caricature art.", "## Contributions and Feedback\n\nYour insights and additions are invaluable to us. We encourage feedback and contributions, such as providing additional images or improved captions, to enrich the dataset further. Your input will facilitate ongoing improvements for the AI and art history communities.\n\n---\n\nThe Daumier Caricatures Dataset serves as an essential tool for those looking to delve into the intersection of AI and art history, providing a foundation for understanding and creating satirical art in the vein of Honoré Daumier." ]
b723c6f3832aaa61d04f8f2a8859d44c5a331e6e
# OpenHermes-2.5-DPO-binarized-alpha

> A DPO dataset built with [distilabel](https://github.com/argilla-io/distilabel) atop the awesome [OpenHermes-2.5 dataset](https://huggingface.co/datasets/teknium/OpenHermes-2.5).

> This is an alpha version with a small sample to collect feedback from the community. It follows a fully OSS approach, using PairRM for preference selection instead of OpenAI models.

<div>
  <img src="https://cdn-uploads.huggingface.co/production/uploads/60420dccc15e823a685f2b03/fEGA3vMnZE2tjJsOeB6hF.webp">
</div>

<p align="center">
  <a href="https://github.com/argilla-io/distilabel">
    <img src="https://raw.githubusercontent.com/argilla-io/distilabel/main/docs/assets/distilabel-badge-light.png" alt="Built with Distilabel" width="200" height="32"/>
  </a>
</p>

## How to use this dataset

This is how you can prepare your data for preference tuning a `chatml`-compatible model:

```python
from datasets import load_dataset
from transformers import AutoTokenizer

# Placeholder: set this to the chatml-compatible model you plan to tune.
model_name = "your/chatml-compatible-model"

def chatml_format(example):
    # Format system
    system = ""

    # Format instruction
    prompt = tokenizer.apply_chat_template(example["chosen"][:-1], tokenize=False, add_generation_prompt=True)

    # Format chosen answer
    chosen = example["chosen"][-1]["content"] + "<|im_end|>\n"

    # Format rejected answer
    rejected = example["rejected"][-1]["content"] + "<|im_end|>\n"

    return {
        "prompt": system + prompt,
        "chosen": chosen,
        "rejected": rejected,
    }

# Tokenizer
tokenizer = AutoTokenizer.from_pretrained(model_name)
tokenizer.pad_token = tokenizer.eos_token
tokenizer.padding_side = "left"

dataset = load_dataset("argilla/openhermes2.5-dpo-binarized-alpha")

# Save columns
original_columns = dataset.column_names

# Format dataset
dataset = dataset.map(
    chatml_format,
    remove_columns=original_columns['train']
)
```

## How we've built this dataset

### Generate responses using vLLM and `Nous-Hermes-2-Yi-34B`

This step generates one response to single-turn examples in the dataset. We use `Nous-Hermes-2-Yi-34B`, but you can use any other model of your choice with this recipe.

```python
from distilabel.llm import vLLM
from distilabel.tasks import TextGenerationTask
from distilabel.pipeline import Pipeline
from distilabel.dataset import DatasetCheckpoint
from datasets import load_dataset
from pathlib import Path
from vllm import LLM

def preprocess(r):
    return {
        "input": r["conversations"][0]["value"]
    }

hermes = load_dataset("teknium/OpenHermes-2.5", split="train[0:10000]")

hermes = hermes.filter(
    lambda r: len(r["conversations"])==2
).map(preprocess)

hermes = hermes.shuffle().select(range(100))

dataset_checkpoint = DatasetCheckpoint(path=Path.cwd() / "checkpoint", save_frequency=10000)

llm = vLLM(
    model=LLM(model="NousResearch/Nous-Hermes-2-Yi-34B"),
    task=TextGenerationTask(),
    prompt_format="chatml",
    max_new_tokens=512
)

pipeline = Pipeline(generator=llm)

dataset = pipeline.generate(
    hermes,
    num_generations=1,
    display_progress_bar=True,
    checkpoint_strategy=dataset_checkpoint,
    batch_size=8
)

dataset.push_to_hub("argilla/openhermes2.5-dpo")
```

### Preferences using PairRM

Instead of taking a naive approach where we assume `Nous-Hermes-2-Yi-34B` will always be worse, we use `PairRM` to rank both the original response and the new response from `Nous-Hermes-2-Yi-34B`.
This results in the following chosen/rejected distribution (for the train split):

![image/png](https://cdn-uploads.huggingface.co/production/uploads/60420dccc15e823a685f2b03/yc9_c3Hb0YSHgBGWOzPO5.png)

```python
import random
import llm_blender

def add_fields(r):
    original_response = r["conversations"][1]["value"]
    Nous_Hermes_2_Yi_34B = r["generations"][0]
    indices = [0, 1]
    random.shuffle(indices)
    responses = [original_response, Nous_Hermes_2_Yi_34B][indices[0]], [original_response, Nous_Hermes_2_Yi_34B][indices[1]]
    models = ["original_response", "Nous_Hermes_2_Yi_34B"][indices[0]], ["original_response", "Nous_Hermes_2_Yi_34B"][indices[1]]
    return {
        "input": r["conversations"][0]["value"],
        "generations": responses,
        "generation_model": models
    }

dataset = dataset.map(add_fields)

blender = llm_blender.Blender()
blender.loadranker("llm-blender/PairRM")

batch_size = 4
def compute_rewards(b):
    return {
        "rating": blender.rank(
            b["input"],
            b["generations"],
            return_scores=True,
            batch_size=batch_size
        )
    }

scored_dataset = dataset.map(
    compute_rewards,
    batched=True,
    batch_size=batch_size,
)

def chosen_rejected(r):
    # Find indices of max and min values in the ratings list
    max_idx = r["rating"].index(max(r["rating"]))
    min_idx = r["rating"].index(min(r["rating"]))

    # Use indices to pick chosen and rejected responses and models
    chosen = r["generations"][max_idx]
    rejected = r["generations"][min_idx]
    chosen_model = r["generation_model"][max_idx]
    rejected_model = r["generation_model"][min_idx]

    return {
        "chosen": chosen,
        "rejected": rejected,
        "chosen_model": chosen_model,
        "rejected_model": rejected_model,
        "rejected_score": r["rating"][min_idx],
        "chosen_score": r["rating"][max_idx],
    }

ds = scored_dataset.filter(lambda r: r['rating'][0]!=r['rating'][1]).map(chosen_rejected)
ds.push_to_hub("argilla/openhermes2.5-dpo-binarized")
```
argilla/OpenHermes2.5-dpo-binarized-alpha
[ "synthetic", "distilabel", "rlaif", "rlhf", "dpo", "region:us" ]
2024-02-02T12:53:12+00:00
{"dataset_info": {"features": [{"name": "hash", "dtype": "null"}, {"name": "avatarUrl", "dtype": "null"}, {"name": "model", "dtype": "null"}, {"name": "category", "dtype": "string"}, {"name": "views", "dtype": "null"}, {"name": "system_prompt", "dtype": "null"}, {"name": "model_name", "dtype": "null"}, {"name": "language", "dtype": "null"}, {"name": "id", "dtype": "null"}, {"name": "skip_prompt_formatting", "dtype": "bool"}, {"name": "custom_instruction", "dtype": "null"}, {"name": "topic", "dtype": "null"}, {"name": "title", "dtype": "null"}, {"name": "idx", "dtype": "null"}, {"name": "source", "dtype": "string"}, {"name": "conversations", "list": [{"name": "from", "dtype": "string"}, {"name": "value", "dtype": "string"}, {"name": "weight", "dtype": "null"}]}, {"name": "input", "dtype": "string"}, {"name": "generation_model", "sequence": "string"}, {"name": "generation_prompt", "sequence": "string"}, {"name": "raw_generation_responses", "sequence": "string"}, {"name": "generations", "sequence": "string"}, {"name": "rating", "sequence": "float32"}, {"name": "chosen", "list": [{"name": "content", "dtype": "string"}, {"name": "role", "dtype": "string"}]}, {"name": "rejected", "list": [{"name": "content", "dtype": "string"}, {"name": "role", "dtype": "string"}]}, {"name": "chosen_model", "dtype": "string"}, {"name": "rejected_model", "dtype": "string"}, {"name": "rejected_score", "dtype": "float64"}, {"name": "chosen_score", "dtype": "float64"}], "splits": [{"name": "train", "num_bytes": 85831620.35596855, "num_examples": 8813}, {"name": "test", "num_bytes": 9544421.64403145, "num_examples": 980}], "download_size": 50892554, "dataset_size": 95376042}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "tags": ["synthetic", "distilabel", "rlaif", "rlhf", "dpo"]}
2024-02-10T09:24:08+00:00
[]
[]
TAGS #synthetic #distilabel #rlaif #rlhf #dpo #region-us
# OpenHermes-2.5-DPO-binarized-alpha > A DPO dataset built with distilabel atop the awesome OpenHermes-2.5 dataset. > This is an alpha version with a small sample to collect feedback from the community. It follows a fully OSS approach, using PairRM for preference selection instead of OpenAI models <div> <img src="URL </div> <p align="center"> <a href="URL <img src="URL alt="Built with Distilabel" width="200" height="32"/> </a> </p> ## How to use this dataset This how you can prepare your data for preference tuning a 'chatml'-compatible model: ## How we've built this dataset ### Generate responses using vLLM and 'Nous-Hermes-2-Yi-34B' This step generates one response to single-turn examples in the dataset. We use 'Nous-Hermes-2-Yi-34B', but you can use any other model of your choice with this recipe. ### Preferences using PairRM Instead of taking a naive approach where we assume 'Nous-Hermes-2-Yi-34B' will always be worse, we use 'PairRM' to rank both the original response and the new response from 'Nous-Hermes-2-Yi-34B'. This results in the following chosen/rejected distribution (for the train split): !image/png
[ "# OpenHermes-2.5-DPO-binarized-alpha\n\n> A DPO dataset built with distilabel atop the awesome OpenHermes-2.5 dataset.\n\n> This is an alpha version with a small sample to collect feedback from the community. It follows a fully OSS approach, using PairRM for preference selection instead of OpenAI models\n\n\n<div>\n <img src=\"URL\n</div>\n\n<p align=\"center\">\n <a href=\"URL\n <img src=\"URL alt=\"Built with Distilabel\" width=\"200\" height=\"32\"/>\n </a>\n</p>", "## How to use this dataset\nThis how you can prepare your data for preference tuning a 'chatml'-compatible model:", "## How we've built this dataset", "### Generate responses using vLLM and 'Nous-Hermes-2-Yi-34B'\n\nThis step generates one response to single-turn examples in the dataset. We use 'Nous-Hermes-2-Yi-34B', but you can use any other model of your choice with this recipe.", "### Preferences using PairRM\nInstead of taking a naive approach where we assume 'Nous-Hermes-2-Yi-34B' will always be worse, we use 'PairRM' to rank both the original response and the new response from 'Nous-Hermes-2-Yi-34B'.\n\nThis results in the following chosen/rejected distribution (for the train split):\n\n!image/png" ]
[ "TAGS\n#synthetic #distilabel #rlaif #rlhf #dpo #region-us \n", "# OpenHermes-2.5-DPO-binarized-alpha\n\n> A DPO dataset built with distilabel atop the awesome OpenHermes-2.5 dataset.\n\n> This is an alpha version with a small sample to collect feedback from the community. It follows a fully OSS approach, using PairRM for preference selection instead of OpenAI models\n\n\n<div>\n <img src=\"URL\n</div>\n\n<p align=\"center\">\n <a href=\"URL\n <img src=\"URL alt=\"Built with Distilabel\" width=\"200\" height=\"32\"/>\n </a>\n</p>", "## How to use this dataset\nThis how you can prepare your data for preference tuning a 'chatml'-compatible model:", "## How we've built this dataset", "### Generate responses using vLLM and 'Nous-Hermes-2-Yi-34B'\n\nThis step generates one response to single-turn examples in the dataset. We use 'Nous-Hermes-2-Yi-34B', but you can use any other model of your choice with this recipe.", "### Preferences using PairRM\nInstead of taking a naive approach where we assume 'Nous-Hermes-2-Yi-34B' will always be worse, we use 'PairRM' to rank both the original response and the new response from 'Nous-Hermes-2-Yi-34B'.\n\nThis results in the following chosen/rejected distribution (for the train split):\n\n!image/png" ]
ee28553799d182ffc78bdb47a045f51dd50343c6
The **EASI** dataset comprises 10 sequences: 2 with clear conditions, 3 with medium conditions, and 5 with turbid conditions. See the [GitHub repository](https://github.com/Jinghe-mel/UFEN-SLAM) for more details.

## Citation

```bibtex
@INPROCEEDINGS{10161047,
  author={Yang, Jinghe and Gong, Mingming and Nair, Girish and Lee, Jung Hoon and Monty, Jason and Pu, Ye},
  booktitle={2023 IEEE International Conference on Robotics and Automation (ICRA)},
  title={Knowledge Distillation for Feature Extraction in Underwater VSLAM},
  year={2023},
  doi={10.1109/ICRA48891.2023.10161047}}
```
Jinghey/EASI_Dataset
[ "license:apache-2.0", "region:us" ]
2024-02-02T13:25:31+00:00
{"license": "apache-2.0", "viewer": false}
2024-02-15T09:25:26+00:00
[]
[]
TAGS #license-apache-2.0 #region-us
The EASI dataset comprises 10 sequences, including 2 with clear conditions, 3 with medium conditions, and 5 with turbid conditions. Please see more on the Github.
[]
[ "TAGS\n#license-apache-2.0 #region-us \n" ]
0d8769d599e21afe6b3ac13cb95bba0e406fbaef
The data comes from [this *Scientific Data* article](https://www.nature.com/articles/s41597-022-01369-4).
profoz/people
[ "region:us" ]
2024-02-02T13:37:31+00:00
{}
2024-02-02T14:01:39+00:00
[]
[]
TAGS #region-us
Data comes from here
[]
[ "TAGS\n#region-us \n" ]
c593fdd5dfdb5ce7e0a0b22325c61a5fe517e12e
# Dataset Card for "open-toolformer-retrieval-multi-neg-result-new-kw" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
severo/open-toolformer-retrieval-multi-neg-result-new-kw
[ "language:en", "region:us" ]
2024-02-02T14:01:23+00:00
{"language": ["en"], "dataset_info": {"features": [{"name": "system_prompt", "dtype": "string"}, {"name": "question", "dtype": "string"}, {"name": "response", "dtype": "string"}, {"name": "meta", "struct": [{"name": "first_search_rank", "dtype": "int64"}, {"name": "second_search", "dtype": "bool"}, {"name": "second_search_success", "dtype": "bool"}, {"name": "source", "dtype": "string"}]}], "splits": [{"name": "train", "num_bytes": 58584517, "num_examples": 30043}], "download_size": 23248813, "dataset_size": 58584517}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2024-02-07T07:58:29+00:00
[]
[ "en" ]
TAGS #language-English #region-us
# Dataset Card for "open-toolformer-retrieval-multi-neg-result-new-kw" More Information needed
[ "# Dataset Card for \"open-toolformer-retrieval-multi-neg-result-new-kw\"\n\nMore Information needed" ]
[ "TAGS\n#language-English #region-us \n", "# Dataset Card for \"open-toolformer-retrieval-multi-neg-result-new-kw\"\n\nMore Information needed" ]
28ccc86631cdccbcd41a385ee990040ef7f46f58
# Dataset Card for "lmind_nq_train1000_eval500_v1_qa" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
tyzhu/lmind_nq_train1000_eval500_v1_qa
[ "region:us" ]
2024-02-02T14:01:37+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train_qa", "path": "data/train_qa-*"}, {"split": "train_recite_qa", "path": "data/train_recite_qa-*"}, {"split": "eval_qa", "path": "data/eval_qa-*"}, {"split": "eval_recite_qa", "path": "data/eval_recite_qa-*"}, {"split": "all_docs", "path": "data/all_docs-*"}, {"split": "all_docs_eval", "path": "data/all_docs_eval-*"}, {"split": "train", "path": "data/train-*"}, {"split": "validation", "path": "data/validation-*"}]}], "dataset_info": {"features": [{"name": "answers", "struct": [{"name": "answer_start", "sequence": "null"}, {"name": "text", "sequence": "string"}]}, {"name": "inputs", "dtype": "string"}, {"name": "targets", "dtype": "string"}], "splits": [{"name": "train_qa", "num_bytes": 115608, "num_examples": 1000}, {"name": "train_recite_qa", "num_bytes": 755758, "num_examples": 1000}, {"name": "eval_qa", "num_bytes": 58285, "num_examples": 500}, {"name": "eval_recite_qa", "num_bytes": 377880, "num_examples": 500}, {"name": "all_docs", "num_bytes": 950316, "num_examples": 1462}, {"name": "all_docs_eval", "num_bytes": 950216, "num_examples": 1462}, {"name": "train", "num_bytes": 115608, "num_examples": 1000}, {"name": "validation", "num_bytes": 58285, "num_examples": 500}], "download_size": 2124112, "dataset_size": 3381956}}
2024-02-02T14:02:00+00:00
[]
[]
TAGS #region-us
# Dataset Card for "lmind_nq_train1000_eval500_v1_qa" More Information needed
[ "# Dataset Card for \"lmind_nq_train1000_eval500_v1_qa\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"lmind_nq_train1000_eval500_v1_qa\"\n\nMore Information needed" ]
1fde01910bb5a7334835462e644157cf0333543e
# Dataset Card for "lmind_nq_train1000_eval500_v1_doc" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
tyzhu/lmind_nq_train1000_eval500_v1_doc
[ "region:us" ]
2024-02-02T14:02:00+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train_qa", "path": "data/train_qa-*"}, {"split": "train_recite_qa", "path": "data/train_recite_qa-*"}, {"split": "eval_qa", "path": "data/eval_qa-*"}, {"split": "eval_recite_qa", "path": "data/eval_recite_qa-*"}, {"split": "all_docs", "path": "data/all_docs-*"}, {"split": "all_docs_eval", "path": "data/all_docs_eval-*"}, {"split": "train", "path": "data/train-*"}, {"split": "validation", "path": "data/validation-*"}]}], "dataset_info": {"features": [{"name": "answers", "struct": [{"name": "answer_start", "sequence": "null"}, {"name": "text", "sequence": "string"}]}, {"name": "inputs", "dtype": "string"}, {"name": "targets", "dtype": "string"}], "splits": [{"name": "train_qa", "num_bytes": 115608, "num_examples": 1000}, {"name": "train_recite_qa", "num_bytes": 755758, "num_examples": 1000}, {"name": "eval_qa", "num_bytes": 58285, "num_examples": 500}, {"name": "eval_recite_qa", "num_bytes": 377880, "num_examples": 500}, {"name": "all_docs", "num_bytes": 950316, "num_examples": 1462}, {"name": "all_docs_eval", "num_bytes": 950216, "num_examples": 1462}, {"name": "train", "num_bytes": 950316, "num_examples": 1462}, {"name": "validation", "num_bytes": 950316, "num_examples": 1462}], "download_size": 3216664, "dataset_size": 5108695}}
2024-02-02T14:02:24+00:00
[]
[]
TAGS #region-us
# Dataset Card for "lmind_nq_train1000_eval500_v1_doc" More Information needed
[ "# Dataset Card for \"lmind_nq_train1000_eval500_v1_doc\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"lmind_nq_train1000_eval500_v1_doc\"\n\nMore Information needed" ]
8edf77d888dc82b28901bb52d50547798055bb30
# Dataset Card for "lmind_nq_train1000_eval500_v1_docidx" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
tyzhu/lmind_nq_train1000_eval500_v1_docidx
[ "region:us" ]
2024-02-02T14:02:28+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train_qa", "path": "data/train_qa-*"}, {"split": "train_recite_qa", "path": "data/train_recite_qa-*"}, {"split": "eval_qa", "path": "data/eval_qa-*"}, {"split": "eval_recite_qa", "path": "data/eval_recite_qa-*"}, {"split": "all_docs", "path": "data/all_docs-*"}, {"split": "all_docs_eval", "path": "data/all_docs_eval-*"}, {"split": "train", "path": "data/train-*"}, {"split": "validation", "path": "data/validation-*"}]}], "dataset_info": {"features": [{"name": "answers", "struct": [{"name": "answer_start", "sequence": "null"}, {"name": "text", "sequence": "string"}]}, {"name": "inputs", "dtype": "string"}, {"name": "targets", "dtype": "string"}], "splits": [{"name": "train_qa", "num_bytes": 115608, "num_examples": 1000}, {"name": "train_recite_qa", "num_bytes": 755758, "num_examples": 1000}, {"name": "eval_qa", "num_bytes": 58285, "num_examples": 500}, {"name": "eval_recite_qa", "num_bytes": 377880, "num_examples": 500}, {"name": "all_docs", "num_bytes": 950316, "num_examples": 1462}, {"name": "all_docs_eval", "num_bytes": 950216, "num_examples": 1462}, {"name": "train", "num_bytes": 950316, "num_examples": 1462}, {"name": "validation", "num_bytes": 950216, "num_examples": 1462}], "download_size": 3228354, "dataset_size": 5108595}}
2024-02-02T14:02:59+00:00
[]
[]
TAGS #region-us
# Dataset Card for "lmind_nq_train1000_eval500_v1_docidx" More Information needed
[ "# Dataset Card for \"lmind_nq_train1000_eval500_v1_docidx\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"lmind_nq_train1000_eval500_v1_docidx\"\n\nMore Information needed" ]
b488bbeafccb69a28c4f17d0f60456bdfb5461b1
# Dataset Card for "lmind_nq_train1000_eval500_v1_doc_qa" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
tyzhu/lmind_nq_train1000_eval500_v1_doc_qa
[ "region:us" ]
2024-02-02T14:02:59+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train_qa", "path": "data/train_qa-*"}, {"split": "train_recite_qa", "path": "data/train_recite_qa-*"}, {"split": "eval_qa", "path": "data/eval_qa-*"}, {"split": "eval_recite_qa", "path": "data/eval_recite_qa-*"}, {"split": "all_docs", "path": "data/all_docs-*"}, {"split": "all_docs_eval", "path": "data/all_docs_eval-*"}, {"split": "train", "path": "data/train-*"}, {"split": "validation", "path": "data/validation-*"}]}], "dataset_info": {"features": [{"name": "answers", "struct": [{"name": "answer_start", "sequence": "null"}, {"name": "text", "sequence": "string"}]}, {"name": "inputs", "dtype": "string"}, {"name": "targets", "dtype": "string"}], "splits": [{"name": "train_qa", "num_bytes": 115608, "num_examples": 1000}, {"name": "train_recite_qa", "num_bytes": 755758, "num_examples": 1000}, {"name": "eval_qa", "num_bytes": 58285, "num_examples": 500}, {"name": "eval_recite_qa", "num_bytes": 377880, "num_examples": 500}, {"name": "all_docs", "num_bytes": 950316, "num_examples": 1462}, {"name": "all_docs_eval", "num_bytes": 950216, "num_examples": 1462}, {"name": "train", "num_bytes": 1065924, "num_examples": 2462}, {"name": "validation", "num_bytes": 58285, "num_examples": 500}], "download_size": 2722791, "dataset_size": 4332272}}
2024-02-02T14:03:21+00:00
[]
[]
TAGS #region-us
# Dataset Card for "lmind_nq_train1000_eval500_v1_doc_qa" More Information needed
[ "# Dataset Card for \"lmind_nq_train1000_eval500_v1_doc_qa\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"lmind_nq_train1000_eval500_v1_doc_qa\"\n\nMore Information needed" ]
19a7c4ae212986493b046f082c5625130aa8acb9
# Dataset Card for "lmind_nq_train1000_eval500_v1_recite_qa" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
tyzhu/lmind_nq_train1000_eval500_v1_recite_qa
[ "region:us" ]
2024-02-02T14:03:22+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train_qa", "path": "data/train_qa-*"}, {"split": "train_recite_qa", "path": "data/train_recite_qa-*"}, {"split": "eval_qa", "path": "data/eval_qa-*"}, {"split": "eval_recite_qa", "path": "data/eval_recite_qa-*"}, {"split": "all_docs", "path": "data/all_docs-*"}, {"split": "all_docs_eval", "path": "data/all_docs_eval-*"}, {"split": "train", "path": "data/train-*"}, {"split": "validation", "path": "data/validation-*"}]}], "dataset_info": {"features": [{"name": "answers", "struct": [{"name": "answer_start", "sequence": "null"}, {"name": "text", "sequence": "string"}]}, {"name": "inputs", "dtype": "string"}, {"name": "targets", "dtype": "string"}], "splits": [{"name": "train_qa", "num_bytes": 115608, "num_examples": 1000}, {"name": "train_recite_qa", "num_bytes": 755758, "num_examples": 1000}, {"name": "eval_qa", "num_bytes": 58285, "num_examples": 500}, {"name": "eval_recite_qa", "num_bytes": 377880, "num_examples": 500}, {"name": "all_docs", "num_bytes": 950316, "num_examples": 1462}, {"name": "all_docs_eval", "num_bytes": 950216, "num_examples": 1462}, {"name": "train", "num_bytes": 1706074, "num_examples": 2462}, {"name": "validation", "num_bytes": 377880, "num_examples": 500}], "download_size": 3311953, "dataset_size": 5292017}}
2024-02-02T14:03:45+00:00
[]
[]
TAGS #region-us
# Dataset Card for "lmind_nq_train1000_eval500_v1_recite_qa" More Information needed
[ "# Dataset Card for \"lmind_nq_train1000_eval500_v1_recite_qa\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"lmind_nq_train1000_eval500_v1_recite_qa\"\n\nMore Information needed" ]
6dd9b1c5608b5b473ecbe665f961212829d030bf
# Dataset Card for "lmind_nq_train1000_eval500_v1_reciteonly_qa" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
tyzhu/lmind_nq_train1000_eval500_v1_reciteonly_qa
[ "region:us" ]
2024-02-02T14:03:46+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train_qa", "path": "data/train_qa-*"}, {"split": "train_recite_qa", "path": "data/train_recite_qa-*"}, {"split": "eval_qa", "path": "data/eval_qa-*"}, {"split": "eval_recite_qa", "path": "data/eval_recite_qa-*"}, {"split": "all_docs", "path": "data/all_docs-*"}, {"split": "all_docs_eval", "path": "data/all_docs_eval-*"}, {"split": "train", "path": "data/train-*"}, {"split": "validation", "path": "data/validation-*"}]}], "dataset_info": {"features": [{"name": "answers", "struct": [{"name": "answer_start", "sequence": "null"}, {"name": "text", "sequence": "string"}]}, {"name": "inputs", "dtype": "string"}, {"name": "targets", "dtype": "string"}], "splits": [{"name": "train_qa", "num_bytes": 115608, "num_examples": 1000}, {"name": "train_recite_qa", "num_bytes": 755758, "num_examples": 1000}, {"name": "eval_qa", "num_bytes": 58285, "num_examples": 500}, {"name": "eval_recite_qa", "num_bytes": 377880, "num_examples": 500}, {"name": "all_docs", "num_bytes": 950316, "num_examples": 1462}, {"name": "all_docs_eval", "num_bytes": 950216, "num_examples": 1462}, {"name": "train", "num_bytes": 755758, "num_examples": 1000}, {"name": "validation", "num_bytes": 377880, "num_examples": 500}], "download_size": 2713438, "dataset_size": 4341701}}
2024-02-02T14:04:07+00:00
[]
[]
TAGS #region-us
# Dataset Card for "lmind_nq_train1000_eval500_v1_reciteonly_qa" More Information needed
[ "# Dataset Card for \"lmind_nq_train1000_eval500_v1_reciteonly_qa\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"lmind_nq_train1000_eval500_v1_reciteonly_qa\"\n\nMore Information needed" ]
723c02f13c9ca344a1dd6b2dc21f68746773d70c
# Dataset Card for sustainability-report-emissions

A dataset of 3233 corporate sustainability reports with scope 1, 2 and 3 greenhouse gas emissions extracted by [Mixtral-8x7B-v0.1](https://huggingface.co/mistralai/Mixtral-8x7B-v0.1). The model output also includes the pages in the report which are used for the prediction.

## Dataset Details

### Dataset Description

Together with the [corporate-emission-reports](https://huggingface.co/datasets/nopperl/corporate-emission-reports) dataset, this dataset exists for the long-context structured information extraction task of extracting scope 1, 2 and 3 greenhouse gas emissions from corporate sustainability reports. While the [corporate-emission-reports](https://huggingface.co/datasets/nopperl/corporate-emission-reports) dataset is intended for the evaluation of automatic extraction systems and contains a small amount of manually extracted data, this dataset is intended to train/finetune (weak) models and contains a larger amount of data extracted using the [Mixtral-8x7B-v0.1](https://huggingface.co/mistralai/Mixtral-8x7B-v0.1) language model, which achieves an accuracy of 68% on the [corporate-emission-reports](https://huggingface.co/datasets/nopperl/corporate-emission-reports) dataset.

- **License:** Open Data Commons Public Domain Dedication and License (PDDL)

### Dataset Sources [optional]

- **Repository:** https://github.com/nopperl/corporate_emission_reports
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]

## Uses

This dataset is intended to train or finetune models to extract machine-readable greenhouse gas emission data from sustainability reports.

### Direct Use

The main intended use case is training/finetuning decoder-only language models. Note that the dataset only includes URLs to the reports and not the plain text itself. Hence, for training these models, it is required to download and preprocess the data. This can be done using the scripts provided at the [GitHub repo](https://github.com/nopperl/corporate_emission_reports). A preprocessed version of this dataset is provided at [sustainability-report-emissions-instruction-style](https://huggingface.co/datasets/nopperl/sustainability-report-emissions-instruction-style). An example of a model finetuned on this dataset is [emissions-extraction-lora](https://huggingface.co/nopperl/emissions-extraction-lora).

## Dataset Structure

- `id` (string): unique instance id, ex. "ASX_ABC_2019". No specific structure is enforced.
- `url` (string): the URL to the sustainability report PDF.
- `sha256` (string): SHA-256 hash string of the report PDF to ensure the integrity of downloaded files.
- `scope_1` (double): total scope 1 emissions in metric tonnes of CO2eq.
- `scope_2` (double): total market-based scope 2 emissions in metric tonnes of CO2eq.
- `scope_3` (double): total scope 3 emissions in metric tonnes of CO2eq.
- `sources` (list<int>): set of pages containing emission data.

## Dataset Creation

### Curation Rationale

The dataset was created to attempt to improve the performance of weaker models based on the output of a stronger model.

### Source Data

The dataset is based on sustainability reports from corporations in Europe, North America and Australia.
#### Data Collection and Processing

The sustainability reports are sourced from ResponsibilityReports.com. All report PDFs from corporations listed on the [ASX](https://www.responsibilityreports.com/Companies?exch=7), [LSE](https://www.responsibilityreports.com/Companies?exch=9), [NASDAQ](https://www.responsibilityreports.com/Companies?exch=2) and [TSX](https://www.responsibilityreports.com/Companies?exch=5) are downloaded from this site. Importantly, this does not include corporations listed on the [NYSE](https://www.responsibilityreports.com/Companies?exch=1), a subset of which is included in the evaluation dataset. Documents which do not contain the terms `scope 1`, `scope 2` or `scope 3` are discarded, as they likely do not contain emission data. In total, this leaves 3233 reports.

#### Who are the source data producers?

The sustainability reports are produced by corporations themselves and optionally verified by third parties. Thus, they only contain self-reported emission information.

### Annotations [optional]

The sustainability reports are annotated with emission data extracted using the [Mixtral-8x7B-v0.1](https://huggingface.co/mistralai/Mixtral-8x7B-v0.1) language model, which is the main purpose of this dataset.

#### Annotation process

The automatic extraction is run using [llama.cpp](https://github.com/ggerganov/llama.cpp) and the Q5_K_M quantized version of [Mixtral-8x7B-v0.1](https://huggingface.co/mistralai/Mixtral-8x7B-v0.1). The Mixtral model was chosen as it was one of the strongest open source models at the time of creation. The quantization variant was chosen to fit the available GPU memory while retaining enough free space for long sequences. The model receives an instruction and text extracted from pages of a report as input and produces a JSON object. Conformance to a consistent JSON schema is enforced using llama.cpp's BNF grammar-based decoding. The JSON object contains the scope 1, 2 and 3 emissions and a list of page sources. For prompts which are longer than the model's context length of 32768 tokens, [self-extend](https://arxiv.org/abs/2401.01325) is used with a window size of 2048 and a neighbour size of 8.

An example prompt for the [2020 Corporate Social Responsibility Report by Walsin Lihwa Corp.](https://www.responsibilityreports.com/HostedData/ResponsibilityReportArchive/w/ASX_61HG_2020.pdf) is provided at `example-prompt.txt`. The completion by Mixtral based on this input is: `{"scope_1":190316,"scope_2":null,"scope_3":null,"sources":[163]}`

#### Who are the annotators?

[More Information Needed]

#### Personal and Sensitive Information

The dataset contains only public information.

## Bias, Risks, and Limitations

It is important to note that the emission data is extracted using a language model and is in no way verified. Keep in mind that the model used for extraction achieves an accuracy of 68% on the [corporate-emission-reports](https://huggingface.co/datasets/nopperl/corporate-emission-reports) dataset.

## Citation [optional]

**BibTeX:**

[More Information Needed]
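As an illustrative sketch (not part of the official scripts), the `url` and `sha256` fields documented in the Dataset Structure section can be used to download a report and verify its integrity before preprocessing:

```python
import hashlib
import requests

def fetch_and_verify(url: str, sha256: str) -> bytes:
    """Download a report PDF and check it against the recorded SHA-256 hash."""
    response = requests.get(url, timeout=60)
    response.raise_for_status()
    pdf_bytes = response.content
    digest = hashlib.sha256(pdf_bytes).hexdigest()
    if digest != sha256:
        raise ValueError(f"Hash mismatch: expected {sha256}, got {digest}")
    return pdf_bytes
```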
nopperl/sustainability-report-emissions
[ "size_categories:1K<n<10K", "language:en", "license:pddl", "climate", "arxiv:2401.01325", "region:us" ]
2024-02-02T14:11:22+00:00
{"language": ["en"], "license": "pddl", "size_categories": ["1K<n<10K"], "tags": ["climate"]}
2024-02-03T19:40:44+00:00
[ "2401.01325" ]
[ "en" ]
TAGS #size_categories-1K<n<10K #language-English #license-pddl #climate #arxiv-2401.01325 #region-us
# Dataset Card for Dataset Name

A dataset of 3233 corporate sustainability reports with scope 1, 2 and 3 greenhouse gas emissions extracted by Mixtral-8x7B-v0.1. The model output also includes the pages in the report which are used for the prediction.

## Dataset Details

### Dataset Description

Together with the corporate-emission-reports dataset, this dataset exists for the long-context structured information extraction task of extracting scope 1, 2 and 3 greenhouse gas emissions from corporate sustainability reports.

While the corporate-emission-reports dataset is intended for the evaluation of automatic extraction systems and contains a small amount of manually extracted data, this dataset is intended to train/finetune (weak) models and contains a larger amount of data extracted using the Mixtral-8x7B-v0.1 language model, which achieves an accuracy of 68% on the corporate-emission-reports dataset.

- License: Open Data Commons Public Domain Dedication and License (PDDL)

### Dataset Sources [optional]

- Repository: URL
- Paper [optional]:
- Demo [optional]:

## Uses

This dataset is intended to train or finetune models to extract machine-readable greenhouse gas emission data from sustainability reports.

### Direct Use

The main intended use case is training/finetuning decoder-only language models. Note that the dataset only includes URLs to the reports and not the plain text itself. Hence, for training these models, the reports have to be downloaded and preprocessed first. This can be done using the scripts provided at the GitHub repo. A preprocessed version of this dataset is provided at sustainability-report-emissions-instruction-style. An example of a model finetuned on this dataset is emissions-extraction-lora.

## Dataset Structure

- 'id' (string): unique instance ID, e.g. "ASX_ABC_2019". No specific structure is enforced.
- 'url' (string): the URL to the sustainability report PDF.
- 'sha256' (string): SHA-256 hash string of the report PDF to ensure the integrity of downloaded files.
- 'scope_1' (double): total scope 1 emissions in metric tonnes of CO2eq.
- 'scope_2' (double): total market-based scope 2 emissions in metric tonnes of CO2eq.
- 'scope_3' (double): total scope 3 emissions in metric tonnes of CO2eq.
- 'sources' (list<int>): set of pages containing emission data.

## Dataset Creation

### Curation Rationale

The dataset was created to improve the performance of weaker models by training them on the output of a stronger model.

### Source Data

The dataset is based on sustainability reports from corporations in Europe, North America and Australia.

#### Data Collection and Processing

The sustainability reports are sourced from URL. All report PDFs from corporations listed on the ASX, LSE, NASDAQ and TSX are downloaded from this site. Importantly, this does not include corporations listed on the NYSE, a subset of which is included in the evaluation dataset. Documents which do not contain the terms 'scope 1', 'scope 2' or 'scope 3' are discarded as they likely do not contain emission data. In total, this leaves 3233 reports.

#### Who are the source data producers?

The sustainability reports are produced by corporations themselves and optionally verified by third parties. Thus, they only contain self-reported emission information.
### Annotations [optional]

The sustainability reports are annotated with emission data extracted using the Mixtral-8x7B-v0.1 language model, which is the main purpose of this dataset.

#### Annotation process

The automatic extraction is run using URL and the Q5_K_M quantized version of Mixtral-8x7B-v0.1. The Mixtral model was chosen as it was one of the strongest open source models at the time of creation. The quantization variant was chosen to fit the available GPU memory while retaining enough free space for long sequences.

The model receives an instruction and text extracted from pages of a report as input and produces a JSON object. Conformance to a consistent JSON schema is enforced using URL's BNF grammar-based decoding. The JSON object contains the scope 1, 2 and 3 emissions and a list of page sources. For prompts which are longer than the model's context length of 32768 tokens, self-extend is used with a window size of 2048 and a neighbour size of 8.

An example prompt for the 2020 Corporate Social Responsibility Report by Walsin Lihwa Corp. is provided at 'URL'. The completion by Mixtral based on this input is:

'{"scope_1":190316,"scope_2":null,"scope_3":null,"sources":[163]}'

#### Who are the annotators?

#### Personal and Sensitive Information

The dataset contains only public information.

## Bias, Risks, and Limitations

It is important to note that the emission data is extracted using a language model and is in no way verified. Keep in mind that the model used for extraction achieves an accuracy of 68% on the corporate-emission-reports dataset.

[optional]

BibTeX:
[ "# Dataset Card for Dataset Name\n\nA dataset of 3233 corporate sustainability reports with scope 1, 2 and 3 greenhouse gas emissions extracted by Mixtral-8x7B-v0.1. The model output also includes the pages in the report which are used for the prediction.\n\nThis dataset card aims to be a base template for new datasets. It has been generated using this raw template.", "## Dataset Details", "### Dataset Description\n\nTogether with the corporate-emission-reports dataset, this dataset exists for the long-context structured information extraction task of extracting scope 1, 2 and 3 greenhouse gas emissions from corporate sustainability reports.\n\nWhile the corporate-emission-reports dataset is intended for the evaluation of automatic extraction systems and contains a small amount of manually extracted data, this dataset is intended to train/finetune (weak) models and contains a larger amount of amount of data extracted using the Mixtral-8x7B-v0.1 language model, which achieves an accuracy of 68\\% on the corporate-emission-reports dataset.\n\n\n- License: Open Data Commons Public Domain Dedication and License (PDDL)", "### Dataset Sources [optional]\n\n\n- Repository: URL\n- Paper [optional]: \n- Demo [optional]:", "## Uses\n\nThis dataset is intended to train or finetune models to extract machine-readable greenhouse gas emission data from sustainability reports.", "### Direct Use\n\nThe main intended use case is training/finetuning decoder-only language models. Note, that the dataset only includes URLs to the reports and not the plain text itself. Hence, for training these models, it is required to download and preprocess the data. This can be done using the scripts provided at the GitHub repo. A preprocessed version of this dataset is provided at sustainability-report-emissions-instruction-style. An example of a model finetuned on this dataset is emissions-extraction-lora.", "## Dataset Structure\n\n- 'id' (string): unique instance id, ex. \"ASX_ABC_2019\". No specific structure is enforced.\n- 'url' (string): the URL to the sustainability report PDF.\n- 'sha256' (string): SHA-256 hash string of the report PDF to ensure the integrity of downloaded files.\n- 'scope_1' (double): total scope 1 emissions in metric tonnes of CO2eq.\n- 'scope_2' (double): total market-based scope 2 emissions in metric tonnes of CO2eq.\n- 'scope_3' (double): total scope 3 emissions in metric tonnes of CO2eq.\n- 'sources' (list<int>): set of pages containing emission data.", "## Dataset Creation", "### Curation Rationale\n\nThe dataset was created to attempt to improve the performance of weaker models based on the output of a stronger model.", "### Source Data\n\nThe dataset is based on sustainability reports from corporations in Europe, North America and Australia.", "#### Data Collection and Processing\n\nThe sustainability reports are sourced from URL. All report PDF's from corporations listed on the ASX, LSE, NASDAQ and TSX are downloaded from this site. Importantly, this does not include corporations listed on the NYSE, a subset of which is included in the evaluation dataset. Documents which do not contain the terms 'scope 1', 'scope 2' or 'scope 3' are discarded as they likely do not contain emission data. In total, this leaves 3233 reports.", "#### Who are the source data producers?\n\nThe sustainability reports are produced by corporations themselves and optionally verified by third parties. 
Thus, they only contain self-reported emission information.", "### Annotations [optional]\n\nThe sustainability reports are annotated with emission data extracted using the Mixtral-8x7B-v0.1 language model, which is the main purpose of this dataset.", "#### Annotation process\n\nThe automatic exctraction is run using URL and the Q5_K_M quantized version of Mixtral-8x7B-v0.1. The Mixtral model is chosen as it was one of the strongest open source models at the time of creation. The quantization variant was chosen to fit the available GPU memory while retaining enough free space for long sequences.\n\nThe model recieves an instruction and text extracted from pages of a report as input and produces a JSON object. Conformance to a consistent JSON schema is enforced using URL's BNF grammar-based decoding. The JSON object contains the source 1, 2 and 3 emissions and a list of page sources. For prompts which are longer than the model's context length of 32768, self-extend is used with a window size of 2048 and a neighbour size of 8. An example prompt for the 2020 Corporate Social Responsibility Report by Walsin Lihwa Corp. is provided at 'URL'. The completion by Mixtral based on this input is:\n\n'{\"scope_1\":190316,\"scope_2\":null,\"scope_3\":null,\"sources\":[163]}'", "#### Who are the annotators?", "#### Personal and Sensitive Information\n\nThe dataset contains only public information.", "## Bias, Risks, and Limitations\n\nIt is important to note that the emission data is extracted using a language model and is in no way verified. Keep in mind that the model used for extraction achieves an accuracy of 68\\% on the corporate-emission-reports dataset.\n\n[optional]\n\nBibTeX:" ]
[ "TAGS\n#size_categories-1K<n<10K #language-English #license-pddl #climate #arxiv-2401.01325 #region-us \n", "# Dataset Card for Dataset Name\n\nA dataset of 3233 corporate sustainability reports with scope 1, 2 and 3 greenhouse gas emissions extracted by Mixtral-8x7B-v0.1. The model output also includes the pages in the report which are used for the prediction.\n\nThis dataset card aims to be a base template for new datasets. It has been generated using this raw template.", "## Dataset Details", "### Dataset Description\n\nTogether with the corporate-emission-reports dataset, this dataset exists for the long-context structured information extraction task of extracting scope 1, 2 and 3 greenhouse gas emissions from corporate sustainability reports.\n\nWhile the corporate-emission-reports dataset is intended for the evaluation of automatic extraction systems and contains a small amount of manually extracted data, this dataset is intended to train/finetune (weak) models and contains a larger amount of amount of data extracted using the Mixtral-8x7B-v0.1 language model, which achieves an accuracy of 68\\% on the corporate-emission-reports dataset.\n\n\n- License: Open Data Commons Public Domain Dedication and License (PDDL)", "### Dataset Sources [optional]\n\n\n- Repository: URL\n- Paper [optional]: \n- Demo [optional]:", "## Uses\n\nThis dataset is intended to train or finetune models to extract machine-readable greenhouse gas emission data from sustainability reports.", "### Direct Use\n\nThe main intended use case is training/finetuning decoder-only language models. Note, that the dataset only includes URLs to the reports and not the plain text itself. Hence, for training these models, it is required to download and preprocess the data. This can be done using the scripts provided at the GitHub repo. A preprocessed version of this dataset is provided at sustainability-report-emissions-instruction-style. An example of a model finetuned on this dataset is emissions-extraction-lora.", "## Dataset Structure\n\n- 'id' (string): unique instance id, ex. \"ASX_ABC_2019\". No specific structure is enforced.\n- 'url' (string): the URL to the sustainability report PDF.\n- 'sha256' (string): SHA-256 hash string of the report PDF to ensure the integrity of downloaded files.\n- 'scope_1' (double): total scope 1 emissions in metric tonnes of CO2eq.\n- 'scope_2' (double): total market-based scope 2 emissions in metric tonnes of CO2eq.\n- 'scope_3' (double): total scope 3 emissions in metric tonnes of CO2eq.\n- 'sources' (list<int>): set of pages containing emission data.", "## Dataset Creation", "### Curation Rationale\n\nThe dataset was created to attempt to improve the performance of weaker models based on the output of a stronger model.", "### Source Data\n\nThe dataset is based on sustainability reports from corporations in Europe, North America and Australia.", "#### Data Collection and Processing\n\nThe sustainability reports are sourced from URL. All report PDF's from corporations listed on the ASX, LSE, NASDAQ and TSX are downloaded from this site. Importantly, this does not include corporations listed on the NYSE, a subset of which is included in the evaluation dataset. Documents which do not contain the terms 'scope 1', 'scope 2' or 'scope 3' are discarded as they likely do not contain emission data. In total, this leaves 3233 reports.", "#### Who are the source data producers?\n\nThe sustainability reports are produced by corporations themselves and optionally verified by third parties. 
Thus, they only contain self-reported emission information.", "### Annotations [optional]\n\nThe sustainability reports are annotated with emission data extracted using the Mixtral-8x7B-v0.1 language model, which is the main purpose of this dataset.", "#### Annotation process\n\nThe automatic exctraction is run using URL and the Q5_K_M quantized version of Mixtral-8x7B-v0.1. The Mixtral model is chosen as it was one of the strongest open source models at the time of creation. The quantization variant was chosen to fit the available GPU memory while retaining enough free space for long sequences.\n\nThe model recieves an instruction and text extracted from pages of a report as input and produces a JSON object. Conformance to a consistent JSON schema is enforced using URL's BNF grammar-based decoding. The JSON object contains the source 1, 2 and 3 emissions and a list of page sources. For prompts which are longer than the model's context length of 32768, self-extend is used with a window size of 2048 and a neighbour size of 8. An example prompt for the 2020 Corporate Social Responsibility Report by Walsin Lihwa Corp. is provided at 'URL'. The completion by Mixtral based on this input is:\n\n'{\"scope_1\":190316,\"scope_2\":null,\"scope_3\":null,\"sources\":[163]}'", "#### Who are the annotators?", "#### Personal and Sensitive Information\n\nThe dataset contains only public information.", "## Bias, Risks, and Limitations\n\nIt is important to note that the emission data is extracted using a language model and is in no way verified. Keep in mind that the model used for extraction achieves an accuracy of 68\\% on the corporate-emission-reports dataset.\n\n[optional]\n\nBibTeX:" ]
369a77c41b1d0ab2c53add9877fafa0f9e085640
The [sustainability-report-emissions](https://huggingface.co/datasets/nopperl/sustainability-report-emissions) dataset converted into instruction-style JSONL format for direct consumption by [SFTTrainer](https://huggingface.co/docs/trl/main/en/sft_trainer), [axolotl](https://github.com/OpenAccess-AI-Collective/axolotl), etc.

The prompt consists of an instruction and text extracted from relevant pages of a sustainability report. The output is generated using the [Mixtral-8x7B-v0.1](https://huggingface.co/mistralai/Mixtral-8x7B-v0.1) model and consists of a JSON string containing the scope 1, 2 and 3 emissions as well as the IDs of pages containing this information. The dataset generation scripts are at [this GitHub repo](https://github.com/nopperl/corporate_emission_reports). An example model finetuned on this dataset is [emissions-extraction-lora](https://huggingface.co/nopperl/emissions-extraction-lora).

Note: the prompts do not use any instruction format. It is recommended to process them according to the instruction format your base model was trained on. See the configuration of [emissions-extraction-lora](https://huggingface.co/nopperl/emissions-extraction-lora). Also, as the prompts are rather long (~15000 tokens on average), a significant amount of memory is required for training.
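A minimal sketch of applying a base model's instruction format before fine-tuning, as recommended above. The column names `prompt` and `completion` and the `train` split are assumptions; check the actual JSONL keys before use:

```python
from datasets import load_dataset

ds = load_dataset("nopperl/sustainability-report-emissions-instruction-style", split="train")

def to_mistral_format(example):
    # Mistral-style [INST] wrapping; swap in whatever instruction
    # template your base model was trained on.
    example["text"] = f"<s>[INST] {example['prompt']} [/INST] {example['completion']}</s>"
    return example

ds = ds.map(to_mistral_format)
# The resulting "text" column can then be passed to SFTTrainer via
# dataset_text_field="text" (or to axolotl as a completion-style dataset).
```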
nopperl/sustainability-report-emissions-instruction-style
[ "task_categories:text-generation", "size_categories:1K<n<10K", "language:en", "license:pddl", "climate", "region:us" ]
2024-02-02T14:16:31+00:00
{"language": ["en"], "license": "pddl", "size_categories": ["1K<n<10K"], "task_categories": ["text-generation"], "tags": ["climate"]}
2024-02-07T09:39:24+00:00
[]
[ "en" ]
TAGS #task_categories-text-generation #size_categories-1K<n<10K #language-English #license-pddl #climate #region-us
The sustainability-report-emissions dataset converted into instruction-style JSONL format for direct consumption by SFTTrainer, axolotl, etc.

The prompt consists of an instruction and text extracted from relevant pages of a sustainability report. The output is generated using the Mixtral-8x7B-v0.1 model and consists of a JSON string containing the scope 1, 2 and 3 emissions as well as the IDs of pages containing this information. The dataset generation scripts are at this GitHub repo. An example model finetuned on this dataset is emissions-extraction-lora.

Note: the prompts do not use any instruction format. It is recommended to process them according to the instruction format your base model was trained on. See the configuration of emissions-extraction-lora. Also, as the prompts are rather long (~15000 tokens on average), a significant amount of memory is required for training.
[]
[ "TAGS\n#task_categories-text-generation #size_categories-1K<n<10K #language-English #license-pddl #climate #region-us \n" ]
1d26296cb94ec350156d1f0eb0f22253163e38ac
# Dataset Card for Evaluation run of JaeyeonKang/CCK_Gony_v3.3 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [JaeyeonKang/CCK_Gony_v3.3](https://huggingface.co/JaeyeonKang/CCK_Gony_v3.3) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_JaeyeonKang__CCK_Gony_v3.3", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-02-02T14:16:13.968146](https://huggingface.co/datasets/open-llm-leaderboard/details_JaeyeonKang__CCK_Gony_v3.3/blob/main/results_2024-02-02T14-16-13.968146.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.7127245389126519, "acc_stderr": 0.030324827741987404, "acc_norm": 0.7169197048023546, "acc_norm_stderr": 0.030907560438510125, "mc1": 0.5250917992656059, "mc1_stderr": 0.017481446804104017, "mc2": 0.6741286843212236, "mc2_stderr": 0.015021442699186793 }, "harness|arc:challenge|25": { "acc": 0.674061433447099, "acc_stderr": 0.013697432466693242, "acc_norm": 0.7039249146757679, "acc_norm_stderr": 0.013340916085246252 }, "harness|hellaswag|10": { "acc": 0.6921927902808206, "acc_stderr": 0.004606429684604527, "acc_norm": 0.8788090021907986, "acc_norm_stderr": 0.0032568214188573178 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.37, "acc_stderr": 0.048523658709391, "acc_norm": 0.37, "acc_norm_stderr": 0.048523658709391 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6814814814814815, "acc_stderr": 0.04024778401977108, "acc_norm": 0.6814814814814815, "acc_norm_stderr": 0.04024778401977108 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.7960526315789473, "acc_stderr": 0.032790004063100495, "acc_norm": 0.7960526315789473, "acc_norm_stderr": 0.032790004063100495 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.76, "acc_stderr": 0.04292346959909283, "acc_norm": 0.76, "acc_norm_stderr": 0.04292346959909283 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7811320754716982, "acc_stderr": 0.0254478638251086, "acc_norm": 0.7811320754716982, "acc_norm_stderr": 0.0254478638251086 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7986111111111112, "acc_stderr": 0.033536474697138406, "acc_norm": 0.7986111111111112, "acc_norm_stderr": 0.033536474697138406 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.54, "acc_stderr": 0.05009082659620332, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620332 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.57, "acc_stderr": 0.04975698519562428, "acc_norm": 0.57, "acc_norm_stderr": 0.04975698519562428 },
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.7456647398843931, "acc_stderr": 0.0332055644308557, "acc_norm": 0.7456647398843931, "acc_norm_stderr": 0.0332055644308557 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4411764705882353, "acc_stderr": 0.049406356306056595, "acc_norm": 0.4411764705882353, "acc_norm_stderr": 0.049406356306056595 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.81, "acc_stderr": 0.039427724440366234, "acc_norm": 0.81, "acc_norm_stderr": 0.039427724440366234 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.6723404255319149, "acc_stderr": 0.03068302084323101, "acc_norm": 0.6723404255319149, "acc_norm_stderr": 0.03068302084323101 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.6228070175438597, "acc_stderr": 0.04559522141958216, "acc_norm": 0.6228070175438597, "acc_norm_stderr": 0.04559522141958216 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.6620689655172414, "acc_stderr": 0.039417076320648906, "acc_norm": 0.6620689655172414, "acc_norm_stderr": 0.039417076320648906 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.4947089947089947, "acc_stderr": 0.02574986828855657, "acc_norm": 0.4947089947089947, "acc_norm_stderr": 0.02574986828855657 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.5634920634920635, "acc_stderr": 0.04435932892851466, "acc_norm": 0.5634920634920635, "acc_norm_stderr": 0.04435932892851466 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.43, "acc_stderr": 0.049756985195624284, "acc_norm": 0.43, "acc_norm_stderr": 0.049756985195624284 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.8419354838709677, "acc_stderr": 0.020752831511875278, "acc_norm": 0.8419354838709677, "acc_norm_stderr": 0.020752831511875278 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.6009852216748769, "acc_stderr": 0.034454876862647144, "acc_norm": 0.6009852216748769, "acc_norm_stderr": 0.034454876862647144 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.75, "acc_stderr": 0.04351941398892446, "acc_norm": 0.75, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7818181818181819, "acc_stderr": 0.03225078108306289, "acc_norm": 0.7818181818181819, "acc_norm_stderr": 0.03225078108306289 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8636363636363636, "acc_stderr": 0.024450155973189835, "acc_norm": 0.8636363636363636, "acc_norm_stderr": 0.024450155973189835 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9481865284974094, "acc_stderr": 0.01599622932024412, "acc_norm": 0.9481865284974094, "acc_norm_stderr": 0.01599622932024412 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.7025641025641025, "acc_stderr": 0.023177408131465942, "acc_norm": 0.7025641025641025, "acc_norm_stderr": 0.023177408131465942 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.37777777777777777, "acc_stderr": 0.02956070739246572, "acc_norm": 0.37777777777777777, "acc_norm_stderr": 0.02956070739246572 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.7941176470588235, "acc_stderr": 0.026265024608275882, "acc_norm": 0.7941176470588235, "acc_norm_stderr": 0.026265024608275882 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.46357615894039733, "acc_stderr": 
0.04071636065944215, "acc_norm": 0.46357615894039733, "acc_norm_stderr": 0.04071636065944215 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8899082568807339, "acc_stderr": 0.013419939018681203, "acc_norm": 0.8899082568807339, "acc_norm_stderr": 0.013419939018681203 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.6111111111111112, "acc_stderr": 0.033247089118091176, "acc_norm": 0.6111111111111112, "acc_norm_stderr": 0.033247089118091176 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8529411764705882, "acc_stderr": 0.02485747808025045, "acc_norm": 0.8529411764705882, "acc_norm_stderr": 0.02485747808025045 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8523206751054853, "acc_stderr": 0.02309432958259569, "acc_norm": 0.8523206751054853, "acc_norm_stderr": 0.02309432958259569 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.7713004484304933, "acc_stderr": 0.028188240046929203, "acc_norm": 0.7713004484304933, "acc_norm_stderr": 0.028188240046929203 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.8091603053435115, "acc_stderr": 0.034465133507525975, "acc_norm": 0.8091603053435115, "acc_norm_stderr": 0.034465133507525975 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8677685950413223, "acc_stderr": 0.03092278832044579, "acc_norm": 0.8677685950413223, "acc_norm_stderr": 0.03092278832044579 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8333333333333334, "acc_stderr": 0.036028141763926456, "acc_norm": 0.8333333333333334, "acc_norm_stderr": 0.036028141763926456 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.803680981595092, "acc_stderr": 0.031207970394709218, "acc_norm": 0.803680981595092, "acc_norm_stderr": 0.031207970394709218 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.5892857142857143, "acc_stderr": 0.04669510663875191, "acc_norm": 0.5892857142857143, "acc_norm_stderr": 0.04669510663875191 }, "harness|hendrycksTest-management|5": { "acc": 0.8640776699029126, "acc_stderr": 0.0339329572976101, "acc_norm": 0.8640776699029126, "acc_norm_stderr": 0.0339329572976101 }, "harness|hendrycksTest-marketing|5": { "acc": 0.9188034188034188, "acc_stderr": 0.01789378490401853, "acc_norm": 0.9188034188034188, "acc_norm_stderr": 0.01789378490401853 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.76, "acc_stderr": 0.04292346959909282, "acc_norm": 0.76, "acc_norm_stderr": 0.04292346959909282 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8786717752234994, "acc_stderr": 0.011675913883906723, "acc_norm": 0.8786717752234994, "acc_norm_stderr": 0.011675913883906723 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.8005780346820809, "acc_stderr": 0.021511900654252552, "acc_norm": 0.8005780346820809, "acc_norm_stderr": 0.021511900654252552 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4547486033519553, "acc_stderr": 0.016653875777524006, "acc_norm": 0.4547486033519553, "acc_norm_stderr": 0.016653875777524006 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.8071895424836601, "acc_stderr": 0.0225893188881767, "acc_norm": 0.8071895424836601, "acc_norm_stderr": 0.0225893188881767 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.8038585209003215, "acc_stderr": 0.02255244778047802, "acc_norm": 0.8038585209003215, "acc_norm_stderr": 0.02255244778047802 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.8271604938271605, "acc_stderr": 0.021038517770157365, "acc_norm": 0.8271604938271605, "acc_norm_stderr": 0.021038517770157365 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.5567375886524822, "acc_stderr": 0.029634838473766006, "acc_norm": 0.5567375886524822, "acc_norm_stderr": 0.029634838473766006 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.5430247718383312, "acc_stderr": 0.012722869501611419, "acc_norm": 0.5430247718383312, "acc_norm_stderr": 0.012722869501611419 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.8014705882352942, "acc_stderr": 0.024231013370541087, "acc_norm": 0.8014705882352942, "acc_norm_stderr": 0.024231013370541087 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.7761437908496732, "acc_stderr": 0.016863008585416613, "acc_norm": 0.7761437908496732, "acc_norm_stderr": 0.016863008585416613 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.7181818181818181, "acc_stderr": 0.043091187099464585, "acc_norm": 0.7181818181818181, "acc_norm_stderr": 0.043091187099464585 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.8040816326530612, "acc_stderr": 0.025409301953225678, "acc_norm": 0.8040816326530612, "acc_norm_stderr": 0.025409301953225678 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8706467661691543, "acc_stderr": 0.023729830881018526, "acc_norm": 0.8706467661691543, "acc_norm_stderr": 0.023729830881018526 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.88, "acc_stderr": 0.032659863237109066, "acc_norm": 0.88, "acc_norm_stderr": 0.032659863237109066 }, "harness|hendrycksTest-virology|5": { "acc": 0.5421686746987951, "acc_stderr": 0.0387862677100236, "acc_norm": 0.5421686746987951, "acc_norm_stderr": 0.0387862677100236 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8830409356725146, "acc_stderr": 0.024648068961366145, "acc_norm": 0.8830409356725146, "acc_norm_stderr": 0.024648068961366145 }, "harness|truthfulqa:mc|0": { "mc1": 0.5250917992656059, "mc1_stderr": 0.017481446804104017, "mc2": 0.6741286843212236, "mc2_stderr": 0.015021442699186793 }, "harness|winogrande|5": { "acc": 0.8121546961325967, "acc_stderr": 0.010977481103435091 }, "harness|gsm8k|5": { "acc": 0.5822592873388931, "acc_stderr": 0.013584820638504823 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
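As a small worked example of consuming the metrics shown under "Latest results", here is a sketch that averages accuracy over the MMLU (`hendrycksTest`) subtasks; it assumes that JSON blob has been saved verbatim as `results.json`:

```python
import json

# Assumes the metrics dict printed under "Latest results" was saved as results.json.
with open("results.json") as f:
    metrics = json.load(f)

# Mean accuracy across the MMLU ("hendrycksTest") subtasks.
mmlu_acc = [v["acc"] for k, v in metrics.items() if k.startswith("harness|hendrycksTest")]
print(f"MMLU average acc: {sum(mmlu_acc) / len(mmlu_acc):.4f}")
```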
open-llm-leaderboard/details_JaeyeonKang__CCK_Gony_v3.3
[ "region:us" ]
2024-02-02T14:18:33+00:00
{"pretty_name": "Evaluation run of JaeyeonKang/CCK_Gony_v3.3", "dataset_summary": "Dataset automatically created during the evaluation run of model [JaeyeonKang/CCK_Gony_v3.3](https://huggingface.co/JaeyeonKang/CCK_Gony_v3.3) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_JaeyeonKang__CCK_Gony_v3.3\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-02T14:16:13.968146](https://huggingface.co/datasets/open-llm-leaderboard/details_JaeyeonKang__CCK_Gony_v3.3/blob/main/results_2024-02-02T14-16-13.968146.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.7127245389126519,\n \"acc_stderr\": 0.030324827741987404,\n \"acc_norm\": 0.7169197048023546,\n \"acc_norm_stderr\": 0.030907560438510125,\n \"mc1\": 0.5250917992656059,\n \"mc1_stderr\": 0.017481446804104017,\n \"mc2\": 0.6741286843212236,\n \"mc2_stderr\": 0.015021442699186793\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.674061433447099,\n \"acc_stderr\": 0.013697432466693242,\n \"acc_norm\": 0.7039249146757679,\n \"acc_norm_stderr\": 0.013340916085246252\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6921927902808206,\n \"acc_stderr\": 0.004606429684604527,\n \"acc_norm\": 0.8788090021907986,\n \"acc_norm_stderr\": 0.0032568214188573178\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.37,\n \"acc_stderr\": 0.048523658709391,\n \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.048523658709391\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6814814814814815,\n \"acc_stderr\": 0.04024778401977108,\n \"acc_norm\": 0.6814814814814815,\n \"acc_norm_stderr\": 0.04024778401977108\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.7960526315789473,\n \"acc_stderr\": 0.032790004063100495,\n \"acc_norm\": 0.7960526315789473,\n \"acc_norm_stderr\": 0.032790004063100495\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.76,\n \"acc_stderr\": 0.04292346959909283,\n \"acc_norm\": 0.76,\n \"acc_norm_stderr\": 0.04292346959909283\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7811320754716982,\n \"acc_stderr\": 0.0254478638251086,\n \"acc_norm\": 0.7811320754716982,\n \"acc_norm_stderr\": 0.0254478638251086\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7986111111111112,\n \"acc_stderr\": 0.033536474697138406,\n \"acc_norm\": 0.7986111111111112,\n \"acc_norm_stderr\": 0.033536474697138406\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.54,\n \"acc_stderr\": 
0.05009082659620332,\n \"acc_norm\": 0.54,\n \"acc_norm_stderr\": 0.05009082659620332\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.57,\n \"acc_stderr\": 0.04975698519562428,\n \"acc_norm\": 0.57,\n \"acc_norm_stderr\": 0.04975698519562428\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.48,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.48,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.7456647398843931,\n \"acc_stderr\": 0.0332055644308557,\n \"acc_norm\": 0.7456647398843931,\n \"acc_norm_stderr\": 0.0332055644308557\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4411764705882353,\n \"acc_stderr\": 0.049406356306056595,\n \"acc_norm\": 0.4411764705882353,\n \"acc_norm_stderr\": 0.049406356306056595\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.81,\n \"acc_stderr\": 0.039427724440366234,\n \"acc_norm\": 0.81,\n \"acc_norm_stderr\": 0.039427724440366234\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.6723404255319149,\n \"acc_stderr\": 0.03068302084323101,\n \"acc_norm\": 0.6723404255319149,\n \"acc_norm_stderr\": 0.03068302084323101\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.6228070175438597,\n \"acc_stderr\": 0.04559522141958216,\n \"acc_norm\": 0.6228070175438597,\n \"acc_norm_stderr\": 0.04559522141958216\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.6620689655172414,\n \"acc_stderr\": 0.039417076320648906,\n \"acc_norm\": 0.6620689655172414,\n \"acc_norm_stderr\": 0.039417076320648906\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.4947089947089947,\n \"acc_stderr\": 0.02574986828855657,\n \"acc_norm\": 0.4947089947089947,\n \"acc_norm_stderr\": 0.02574986828855657\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.5634920634920635,\n \"acc_stderr\": 0.04435932892851466,\n \"acc_norm\": 0.5634920634920635,\n \"acc_norm_stderr\": 0.04435932892851466\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.43,\n \"acc_stderr\": 0.049756985195624284,\n \"acc_norm\": 0.43,\n \"acc_norm_stderr\": 0.049756985195624284\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.8419354838709677,\n \"acc_stderr\": 0.020752831511875278,\n \"acc_norm\": 0.8419354838709677,\n \"acc_norm_stderr\": 0.020752831511875278\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.6009852216748769,\n \"acc_stderr\": 0.034454876862647144,\n \"acc_norm\": 0.6009852216748769,\n \"acc_norm_stderr\": 0.034454876862647144\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7818181818181819,\n \"acc_stderr\": 0.03225078108306289,\n \"acc_norm\": 0.7818181818181819,\n \"acc_norm_stderr\": 0.03225078108306289\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8636363636363636,\n \"acc_stderr\": 0.024450155973189835,\n \"acc_norm\": 0.8636363636363636,\n \"acc_norm_stderr\": 0.024450155973189835\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9481865284974094,\n \"acc_stderr\": 0.01599622932024412,\n \"acc_norm\": 0.9481865284974094,\n \"acc_norm_stderr\": 0.01599622932024412\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.7025641025641025,\n \"acc_stderr\": 0.023177408131465942,\n \"acc_norm\": 0.7025641025641025,\n \"acc_norm_stderr\": 0.023177408131465942\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.37777777777777777,\n \"acc_stderr\": 0.02956070739246572,\n \"acc_norm\": 0.37777777777777777,\n \"acc_norm_stderr\": 0.02956070739246572\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.7941176470588235,\n \"acc_stderr\": 0.026265024608275882,\n \"acc_norm\": 0.7941176470588235,\n \"acc_norm_stderr\": 0.026265024608275882\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.46357615894039733,\n \"acc_stderr\": 0.04071636065944215,\n \"acc_norm\": 0.46357615894039733,\n \"acc_norm_stderr\": 0.04071636065944215\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8899082568807339,\n \"acc_stderr\": 0.013419939018681203,\n \"acc_norm\": 0.8899082568807339,\n \"acc_norm_stderr\": 0.013419939018681203\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.6111111111111112,\n \"acc_stderr\": 0.033247089118091176,\n \"acc_norm\": 0.6111111111111112,\n \"acc_norm_stderr\": 0.033247089118091176\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8529411764705882,\n \"acc_stderr\": 0.02485747808025045,\n \"acc_norm\": 0.8529411764705882,\n \"acc_norm_stderr\": 0.02485747808025045\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8523206751054853,\n \"acc_stderr\": 0.02309432958259569,\n \"acc_norm\": 0.8523206751054853,\n \"acc_norm_stderr\": 0.02309432958259569\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.7713004484304933,\n \"acc_stderr\": 0.028188240046929203,\n \"acc_norm\": 0.7713004484304933,\n \"acc_norm_stderr\": 0.028188240046929203\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.8091603053435115,\n \"acc_stderr\": 0.034465133507525975,\n \"acc_norm\": 0.8091603053435115,\n \"acc_norm_stderr\": 0.034465133507525975\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8677685950413223,\n \"acc_stderr\": 0.03092278832044579,\n \"acc_norm\": 0.8677685950413223,\n \"acc_norm_stderr\": 0.03092278832044579\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8333333333333334,\n \"acc_stderr\": 0.036028141763926456,\n \"acc_norm\": 0.8333333333333334,\n \"acc_norm_stderr\": 0.036028141763926456\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.803680981595092,\n \"acc_stderr\": 0.031207970394709218,\n \"acc_norm\": 0.803680981595092,\n \"acc_norm_stderr\": 0.031207970394709218\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.5892857142857143,\n \"acc_stderr\": 0.04669510663875191,\n \"acc_norm\": 0.5892857142857143,\n \"acc_norm_stderr\": 0.04669510663875191\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8640776699029126,\n \"acc_stderr\": 0.0339329572976101,\n \"acc_norm\": 0.8640776699029126,\n \"acc_norm_stderr\": 0.0339329572976101\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.9188034188034188,\n \"acc_stderr\": 0.01789378490401853,\n \"acc_norm\": 0.9188034188034188,\n \"acc_norm_stderr\": 0.01789378490401853\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.76,\n \"acc_stderr\": 0.04292346959909282,\n \"acc_norm\": 0.76,\n \"acc_norm_stderr\": 0.04292346959909282\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.8786717752234994,\n \"acc_stderr\": 0.011675913883906723,\n \"acc_norm\": 0.8786717752234994,\n \"acc_norm_stderr\": 0.011675913883906723\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.8005780346820809,\n \"acc_stderr\": 0.021511900654252552,\n \"acc_norm\": 0.8005780346820809,\n \"acc_norm_stderr\": 0.021511900654252552\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4547486033519553,\n \"acc_stderr\": 0.016653875777524006,\n \"acc_norm\": 0.4547486033519553,\n \"acc_norm_stderr\": 0.016653875777524006\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.8071895424836601,\n \"acc_stderr\": 0.0225893188881767,\n \"acc_norm\": 0.8071895424836601,\n \"acc_norm_stderr\": 0.0225893188881767\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.8038585209003215,\n \"acc_stderr\": 0.02255244778047802,\n \"acc_norm\": 0.8038585209003215,\n \"acc_norm_stderr\": 0.02255244778047802\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.8271604938271605,\n \"acc_stderr\": 0.021038517770157365,\n \"acc_norm\": 0.8271604938271605,\n \"acc_norm_stderr\": 0.021038517770157365\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.5567375886524822,\n \"acc_stderr\": 0.029634838473766006,\n \"acc_norm\": 0.5567375886524822,\n \"acc_norm_stderr\": 0.029634838473766006\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.5430247718383312,\n \"acc_stderr\": 0.012722869501611419,\n \"acc_norm\": 0.5430247718383312,\n \"acc_norm_stderr\": 0.012722869501611419\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.8014705882352942,\n \"acc_stderr\": 0.024231013370541087,\n \"acc_norm\": 0.8014705882352942,\n \"acc_norm_stderr\": 0.024231013370541087\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.7761437908496732,\n \"acc_stderr\": 0.016863008585416613,\n \"acc_norm\": 0.7761437908496732,\n \"acc_norm_stderr\": 0.016863008585416613\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.7181818181818181,\n \"acc_stderr\": 0.043091187099464585,\n \"acc_norm\": 0.7181818181818181,\n \"acc_norm_stderr\": 0.043091187099464585\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.8040816326530612,\n \"acc_stderr\": 0.025409301953225678,\n \"acc_norm\": 0.8040816326530612,\n \"acc_norm_stderr\": 0.025409301953225678\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8706467661691543,\n \"acc_stderr\": 0.023729830881018526,\n \"acc_norm\": 0.8706467661691543,\n \"acc_norm_stderr\": 0.023729830881018526\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.88,\n \"acc_stderr\": 0.032659863237109066,\n \"acc_norm\": 0.88,\n \"acc_norm_stderr\": 0.032659863237109066\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5421686746987951,\n \"acc_stderr\": 0.0387862677100236,\n \"acc_norm\": 0.5421686746987951,\n \"acc_norm_stderr\": 0.0387862677100236\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8830409356725146,\n \"acc_stderr\": 0.024648068961366145,\n \"acc_norm\": 0.8830409356725146,\n \"acc_norm_stderr\": 0.024648068961366145\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5250917992656059,\n \"mc1_stderr\": 0.017481446804104017,\n \"mc2\": 0.6741286843212236,\n \"mc2_stderr\": 0.015021442699186793\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8121546961325967,\n \"acc_stderr\": 0.010977481103435091\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.5822592873388931,\n \"acc_stderr\": 
0.013584820638504823\n }\n}\n```", "repo_url": "https://huggingface.co/JaeyeonKang/CCK_Gony_v3.3", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_02T14_16_13.968146", "path": ["**/details_harness|arc:challenge|25_2024-02-02T14-16-13.968146.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-02T14-16-13.968146.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_02T14_16_13.968146", "path": ["**/details_harness|gsm8k|5_2024-02-02T14-16-13.968146.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-02T14-16-13.968146.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_02T14_16_13.968146", "path": ["**/details_harness|hellaswag|10_2024-02-02T14-16-13.968146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-02T14-16-13.968146.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_02T14_16_13.968146", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T14-16-13.968146.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T14-16-13.968146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T14-16-13.968146.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T14-16-13.968146.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T14-16-13.968146.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T14-16-13.968146.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_02T14_16_13.968146", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T14-16-13.968146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T14-16-13.968146.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_02T14_16_13.968146", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T14-16-13.968146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T14-16-13.968146.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_02T14_16_13.968146", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T14-16-13.968146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T14-16-13.968146.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_02T14_16_13.968146", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T14-16-13.968146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T14-16-13.968146.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_02T14_16_13.968146", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T14-16-13.968146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T14-16-13.968146.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_02T14_16_13.968146", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T14-16-13.968146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T14-16-13.968146.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_02T14_16_13.968146", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T14-16-13.968146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T14-16-13.968146.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_02_02T14_16_13.968146", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T14-16-13.968146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T14-16-13.968146.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_02T14_16_13.968146", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T14-16-13.968146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T14-16-13.968146.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_02T14_16_13.968146", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T14-16-13.968146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T14-16-13.968146.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_02T14_16_13.968146", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T14-16-13.968146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T14-16-13.968146.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_02T14_16_13.968146", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T14-16-13.968146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T14-16-13.968146.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_02T14_16_13.968146", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T14-16-13.968146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T14-16-13.968146.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_02T14_16_13.968146", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T14-16-13.968146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T14-16-13.968146.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_02T14_16_13.968146", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T14-16-13.968146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T14-16-13.968146.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_02T14_16_13.968146", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T14-16-13.968146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T14-16-13.968146.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_02T14_16_13.968146", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T14-16-13.968146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T14-16-13.968146.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_02T14_16_13.968146", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T14-16-13.968146.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T14-16-13.968146.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_02T14_16_13.968146", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T14-16-13.968146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T14-16-13.968146.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_02T14_16_13.968146", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T14-16-13.968146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T14-16-13.968146.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_02T14_16_13.968146", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T14-16-13.968146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T14-16-13.968146.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_02T14_16_13.968146", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T14-16-13.968146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T14-16-13.968146.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_02T14_16_13.968146", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T14-16-13.968146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T14-16-13.968146.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_02T14_16_13.968146", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T14-16-13.968146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T14-16-13.968146.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_02T14_16_13.968146", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T14-16-13.968146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T14-16-13.968146.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_02T14_16_13.968146", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T14-16-13.968146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T14-16-13.968146.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_02T14_16_13.968146", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T14-16-13.968146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T14-16-13.968146.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_02T14_16_13.968146", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T14-16-13.968146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T14-16-13.968146.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_02T14_16_13.968146", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T14-16-13.968146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T14-16-13.968146.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_02T14_16_13.968146", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T14-16-13.968146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T14-16-13.968146.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_02T14_16_13.968146", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T14-16-13.968146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T14-16-13.968146.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_02T14_16_13.968146", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T14-16-13.968146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T14-16-13.968146.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_02T14_16_13.968146", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T14-16-13.968146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T14-16-13.968146.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_02T14_16_13.968146", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T14-16-13.968146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T14-16-13.968146.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_02T14_16_13.968146", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T14-16-13.968146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T14-16-13.968146.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_02T14_16_13.968146", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T14-16-13.968146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T14-16-13.968146.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_02T14_16_13.968146", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T14-16-13.968146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T14-16-13.968146.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_02_02T14_16_13.968146", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T14-16-13.968146.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T14-16-13.968146.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_02T14_16_13.968146", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T14-16-13.968146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T14-16-13.968146.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_02T14_16_13.968146", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T14-16-13.968146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T14-16-13.968146.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_02T14_16_13.968146", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T14-16-13.968146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T14-16-13.968146.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_02T14_16_13.968146", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T14-16-13.968146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T14-16-13.968146.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_02T14_16_13.968146", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T14-16-13.968146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T14-16-13.968146.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_02T14_16_13.968146", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T14-16-13.968146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T14-16-13.968146.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_02T14_16_13.968146", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T14-16-13.968146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T14-16-13.968146.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_02T14_16_13.968146", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T14-16-13.968146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T14-16-13.968146.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_02T14_16_13.968146", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T14-16-13.968146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T14-16-13.968146.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_02T14_16_13.968146", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T14-16-13.968146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T14-16-13.968146.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_02T14_16_13.968146", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T14-16-13.968146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T14-16-13.968146.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_02T14_16_13.968146", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T14-16-13.968146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T14-16-13.968146.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_02T14_16_13.968146", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T14-16-13.968146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T14-16-13.968146.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_02T14_16_13.968146", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T14-16-13.968146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T14-16-13.968146.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_02T14_16_13.968146", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T14-16-13.968146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T14-16-13.968146.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_02T14_16_13.968146", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T14-16-13.968146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T14-16-13.968146.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_02T14_16_13.968146", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T14-16-13.968146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T14-16-13.968146.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_02T14_16_13.968146", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T14-16-13.968146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T14-16-13.968146.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_02T14_16_13.968146", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T14-16-13.968146.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T14-16-13.968146.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_02T14_16_13.968146", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T14-16-13.968146.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T14-16-13.968146.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_02T14_16_13.968146", "path": ["**/details_harness|winogrande|5_2024-02-02T14-16-13.968146.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-02-02T14-16-13.968146.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_02_02T14_16_13.968146", "path": ["results_2024-02-02T14-16-13.968146.parquet"]}, {"split": "latest", "path": ["results_2024-02-02T14-16-13.968146.parquet"]}]}]}
2024-02-02T14:19:03+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of JaeyeonKang/CCK_Gony_v3.3

Dataset automatically created during the evaluation run of model JaeyeonKang/CCK_Gony_v3.3 on the Open LLM Leaderboard.

The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).

To load the details from a run, you can for instance do the following (a loading sketch is given just after this card):

## Latest results

These are the latest results from run 2024-02-02T14:16:13.968146 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval):

## Dataset Details

### Dataset Description

- Curated by:
- Funded by [optional]:
- Shared by [optional]:
- Language(s) (NLP):
- License:

### Dataset Sources [optional]

- Repository:
- Paper [optional]:
- Demo [optional]:

## Uses

### Direct Use

### Out-of-Scope Use

## Dataset Structure

## Dataset Creation

### Curation Rationale

### Source Data

#### Data Collection and Processing

#### Who are the source data producers?

### Annotations [optional]

#### Annotation process

#### Who are the annotators?

#### Personal and Sensitive Information

## Bias, Risks, and Limitations

### Recommendations

Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.

## Citation [optional]

BibTeX:

APA:

## Glossary [optional]

## More Information [optional]

## Dataset Card Authors [optional]

## Dataset Card Contact
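The loading snippet itself is stripped from this record's text field; a minimal sketch, assuming the repository follows the leaderboard's usual `details_<org>__<model>` naming pattern and the same `harness_winogrande_5` config used by the other cards in this dump:

```python
from datasets import load_dataset

# Repo id assumed from the details_<org>__<model> convention used by the other
# evaluation runs in this dump; any config_name from the metadata above works.
data = load_dataset("open-llm-leaderboard/details_JaeyeonKang__CCK_Gony_v3.3",
	"harness_winogrande_5",
	split="train")
```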
[ "# Dataset Card for Evaluation run of JaeyeonKang/CCK_Gony_v3.3\n\n\n\nDataset automatically created during the evaluation run of model JaeyeonKang/CCK_Gony_v3.3 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T14:16:13.968146(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of JaeyeonKang/CCK_Gony_v3.3\n\n\n\nDataset automatically created during the evaluation run of model JaeyeonKang/CCK_Gony_v3.3 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T14:16:13.968146(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
60d4d5dc66fcfb213ae57c5593bb50784320e9e4
# Dataset Card for Evaluation run of pleisto/yuren-13b-chatml

<!-- Provide a quick summary of the dataset. -->

Dataset automatically created during the evaluation run of model [pleisto/yuren-13b-chatml](https://huggingface.co/pleisto/yuren-13b-chatml) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:

```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_pleisto__yuren-13b-chatml",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2024-02-02T14:25:58.778785](https://huggingface.co/datasets/open-llm-leaderboard/details_pleisto__yuren-13b-chatml/blob/main/results_2024-02-02T14-25-58.778785.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval):

```python
{ "all": { "acc": 0.5609180402171289, "acc_stderr": 0.033689189023125767, "acc_norm": 0.5665108554666364, "acc_norm_stderr": 0.03439957265639205, "mc1": 0.29253365973072215, "mc1_stderr": 0.015925597445286165, "mc2": 0.42324311782084495, "mc2_stderr": 0.014813023987866733 }, "harness|arc:challenge|25": { "acc": 0.49658703071672355, "acc_stderr": 0.014611050403244077, "acc_norm": 0.5307167235494881, "acc_norm_stderr": 0.014583792546304037 }, "harness|hellaswag|10": { "acc": 0.5807608046205935, "acc_stderr": 0.004924261467934419, "acc_norm": 0.7803226448914559, "acc_norm_stderr": 0.004131818797713882 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.31, "acc_stderr": 0.046482319871173156, "acc_norm": 0.31, "acc_norm_stderr": 0.046482319871173156 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.5259259259259259, "acc_stderr": 0.04313531696750574, "acc_norm": 0.5259259259259259, "acc_norm_stderr": 0.04313531696750574 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.4934210526315789, "acc_stderr": 0.04068590050224971, "acc_norm": 0.4934210526315789, "acc_norm_stderr": 0.04068590050224971 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.56, "acc_stderr": 0.04988876515698589, "acc_norm": 0.56, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.5735849056603773, "acc_stderr": 0.03043779434298305, "acc_norm": 0.5735849056603773, "acc_norm_stderr": 0.03043779434298305 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.6041666666666666, "acc_stderr": 0.04089465449325582, "acc_norm": 0.6041666666666666, "acc_norm_stderr": 0.04089465449325582 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.5, "acc_stderr": 0.050251890762960605, "acc_norm": 0.5, "acc_norm_stderr": 0.050251890762960605 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.47, "acc_stderr": 0.050161355804659205, "acc_norm": 0.47, "acc_norm_stderr": 0.050161355804659205 },
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.5086705202312138, "acc_stderr": 0.03811890988940412, "acc_norm": 0.5086705202312138, "acc_norm_stderr": 0.03811890988940412 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.30392156862745096, "acc_stderr": 0.045766654032077636, "acc_norm": 0.30392156862745096, "acc_norm_stderr": 0.045766654032077636 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.74, "acc_stderr": 0.04408440022768078, "acc_norm": 0.74, "acc_norm_stderr": 0.04408440022768078 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.425531914893617, "acc_stderr": 0.03232146916224467, "acc_norm": 0.425531914893617, "acc_norm_stderr": 0.03232146916224467 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.34210526315789475, "acc_stderr": 0.044629175353369376, "acc_norm": 0.34210526315789475, "acc_norm_stderr": 0.044629175353369376 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5103448275862069, "acc_stderr": 0.04165774775728763, "acc_norm": 0.5103448275862069, "acc_norm_stderr": 0.04165774775728763 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.32275132275132273, "acc_stderr": 0.024078943243597016, "acc_norm": 0.32275132275132273, "acc_norm_stderr": 0.024078943243597016 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.3333333333333333, "acc_stderr": 0.04216370213557835, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.04216370213557835 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.6645161290322581, "acc_stderr": 0.026860206444724356, "acc_norm": 0.6645161290322581, "acc_norm_stderr": 0.026860206444724356 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.4187192118226601, "acc_stderr": 0.03471192860518468, "acc_norm": 0.4187192118226601, "acc_norm_stderr": 0.03471192860518468 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.61, "acc_stderr": 0.04902071300001975, "acc_norm": 0.61, "acc_norm_stderr": 0.04902071300001975 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.703030303030303, "acc_stderr": 0.035679697722680495, "acc_norm": 0.703030303030303, "acc_norm_stderr": 0.035679697722680495 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7070707070707071, "acc_stderr": 0.03242497958178815, "acc_norm": 0.7070707070707071, "acc_norm_stderr": 0.03242497958178815 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.7616580310880829, "acc_stderr": 0.030748905363909878, "acc_norm": 0.7616580310880829, "acc_norm_stderr": 0.030748905363909878 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.5538461538461539, "acc_stderr": 0.02520357177302833, "acc_norm": 0.5538461538461539, "acc_norm_stderr": 0.02520357177302833 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3148148148148148, "acc_stderr": 0.028317533496066468, "acc_norm": 0.3148148148148148, "acc_norm_stderr": 0.028317533496066468 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.5672268907563025, "acc_stderr": 0.032183581077426124, "acc_norm": 0.5672268907563025, "acc_norm_stderr": 0.032183581077426124 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3576158940397351, "acc_stderr": 
0.03913453431177258, "acc_norm": 0.3576158940397351, "acc_norm_stderr": 0.03913453431177258 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.710091743119266, "acc_stderr": 0.019453066609201597, "acc_norm": 0.710091743119266, "acc_norm_stderr": 0.019453066609201597 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.47685185185185186, "acc_stderr": 0.034063153607115065, "acc_norm": 0.47685185185185186, "acc_norm_stderr": 0.034063153607115065 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7107843137254902, "acc_stderr": 0.031822318676475544, "acc_norm": 0.7107843137254902, "acc_norm_stderr": 0.031822318676475544 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7721518987341772, "acc_stderr": 0.027303484599069422, "acc_norm": 0.7721518987341772, "acc_norm_stderr": 0.027303484599069422 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6636771300448431, "acc_stderr": 0.031708824268455, "acc_norm": 0.6636771300448431, "acc_norm_stderr": 0.031708824268455 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.6412213740458015, "acc_stderr": 0.04206739313864908, "acc_norm": 0.6412213740458015, "acc_norm_stderr": 0.04206739313864908 }, "harness|hendrycksTest-international_law|5": { "acc": 0.743801652892562, "acc_stderr": 0.03984979653302872, "acc_norm": 0.743801652892562, "acc_norm_stderr": 0.03984979653302872 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.6666666666666666, "acc_stderr": 0.04557239513497752, "acc_norm": 0.6666666666666666, "acc_norm_stderr": 0.04557239513497752 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7116564417177914, "acc_stderr": 0.03559039531617342, "acc_norm": 0.7116564417177914, "acc_norm_stderr": 0.03559039531617342 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.38392857142857145, "acc_stderr": 0.046161430750285476, "acc_norm": 0.38392857142857145, "acc_norm_stderr": 0.046161430750285476 }, "harness|hendrycksTest-management|5": { "acc": 0.7766990291262136, "acc_stderr": 0.04123553189891431, "acc_norm": 0.7766990291262136, "acc_norm_stderr": 0.04123553189891431 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8376068376068376, "acc_stderr": 0.024161618127987745, "acc_norm": 0.8376068376068376, "acc_norm_stderr": 0.024161618127987745 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.56, "acc_stderr": 0.0498887651569859, "acc_norm": 0.56, "acc_norm_stderr": 0.0498887651569859 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.756066411238825, "acc_stderr": 0.015357212665829472, "acc_norm": 0.756066411238825, "acc_norm_stderr": 0.015357212665829472 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6705202312138728, "acc_stderr": 0.025305258131879702, "acc_norm": 0.6705202312138728, "acc_norm_stderr": 0.025305258131879702 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.3865921787709497, "acc_stderr": 0.01628667487910102, "acc_norm": 0.3865921787709497, "acc_norm_stderr": 0.01628667487910102 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6013071895424836, "acc_stderr": 0.028036092273891772, "acc_norm": 0.6013071895424836, "acc_norm_stderr": 0.028036092273891772 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6334405144694534, "acc_stderr": 0.02736807824397163, "acc_norm": 0.6334405144694534, "acc_norm_stderr": 0.02736807824397163 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.6327160493827161, "acc_stderr": 0.02682280175950789, "acc_norm": 0.6327160493827161, "acc_norm_stderr": 0.02682280175950789 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.4219858156028369, "acc_stderr": 0.029462189233370597, "acc_norm": 0.4219858156028369, "acc_norm_stderr": 0.029462189233370597 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.42242503259452413, "acc_stderr": 0.012615600475734921, "acc_norm": 0.42242503259452413, "acc_norm_stderr": 0.012615600475734921 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.47794117647058826, "acc_stderr": 0.030343264224213535, "acc_norm": 0.47794117647058826, "acc_norm_stderr": 0.030343264224213535 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.5490196078431373, "acc_stderr": 0.020130388312904528, "acc_norm": 0.5490196078431373, "acc_norm_stderr": 0.020130388312904528 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6454545454545455, "acc_stderr": 0.045820048415054174, "acc_norm": 0.6454545454545455, "acc_norm_stderr": 0.045820048415054174 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.6571428571428571, "acc_stderr": 0.030387262919547728, "acc_norm": 0.6571428571428571, "acc_norm_stderr": 0.030387262919547728 }, "harness|hendrycksTest-sociology|5": { "acc": 0.7263681592039801, "acc_stderr": 0.031524391865554016, "acc_norm": 0.7263681592039801, "acc_norm_stderr": 0.031524391865554016 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.8, "acc_stderr": 0.04020151261036847, "acc_norm": 0.8, "acc_norm_stderr": 0.04020151261036847 }, "harness|hendrycksTest-virology|5": { "acc": 0.42771084337349397, "acc_stderr": 0.038515976837185335, "acc_norm": 0.42771084337349397, "acc_norm_stderr": 0.038515976837185335 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8070175438596491, "acc_stderr": 0.030267457554898458, "acc_norm": 0.8070175438596491, "acc_norm_stderr": 0.030267457554898458 }, "harness|truthfulqa:mc|0": { "mc1": 0.29253365973072215, "mc1_stderr": 0.015925597445286165, "mc2": 0.42324311782084495, "mc2_stderr": 0.014813023987866733 }, "harness|winogrande|5": { "acc": 0.744277821625888, "acc_stderr": 0.012261253845440474 }, "harness|gsm8k|5": { "acc": 0.2812736921910538, "acc_stderr": 0.012384789310940236 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
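Beyond the per-task configs, the aggregated metrics live in the "results" config. A sketch of pinning one specific run rather than the "latest" alias, assuming the "results" config follows the same timestamped-split convention as the task configs in this record's metadata:

```python
from datasets import load_dataset

# The split name mirrors this run's timestamp (2024-02-02T14:25:58.778785);
# that the "results" config exposes it is an assumption based on the
# split-naming convention described in the card. Use split="latest" to
# always follow the most recent run instead.
results = load_dataset("open-llm-leaderboard/details_pleisto__yuren-13b-chatml",
	"results",
	split="2024_02_02T14_25_58.778785")
```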
open-llm-leaderboard/details_pleisto__yuren-13b-chatml
[ "region:us" ]
2024-02-02T14:28:19+00:00
{"pretty_name": "Evaluation run of pleisto/yuren-13b-chatml", "dataset_summary": "Dataset automatically created during the evaluation run of model [pleisto/yuren-13b-chatml](https://huggingface.co/pleisto/yuren-13b-chatml) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_pleisto__yuren-13b-chatml\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-02T14:25:58.778785](https://huggingface.co/datasets/open-llm-leaderboard/details_pleisto__yuren-13b-chatml/blob/main/results_2024-02-02T14-25-58.778785.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5609180402171289,\n \"acc_stderr\": 0.033689189023125767,\n \"acc_norm\": 0.5665108554666364,\n \"acc_norm_stderr\": 0.03439957265639205,\n \"mc1\": 0.29253365973072215,\n \"mc1_stderr\": 0.015925597445286165,\n \"mc2\": 0.42324311782084495,\n \"mc2_stderr\": 0.014813023987866733\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.49658703071672355,\n \"acc_stderr\": 0.014611050403244077,\n \"acc_norm\": 0.5307167235494881,\n \"acc_norm_stderr\": 0.014583792546304037\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.5807608046205935,\n \"acc_stderr\": 0.004924261467934419,\n \"acc_norm\": 0.7803226448914559,\n \"acc_norm_stderr\": 0.004131818797713882\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.046482319871173156,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.046482319871173156\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5259259259259259,\n \"acc_stderr\": 0.04313531696750574,\n \"acc_norm\": 0.5259259259259259,\n \"acc_norm_stderr\": 0.04313531696750574\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.4934210526315789,\n \"acc_stderr\": 0.04068590050224971,\n \"acc_norm\": 0.4934210526315789,\n \"acc_norm_stderr\": 0.04068590050224971\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.56,\n \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.56,\n \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.5735849056603773,\n \"acc_stderr\": 0.03043779434298305,\n \"acc_norm\": 0.5735849056603773,\n \"acc_norm_stderr\": 0.03043779434298305\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.6041666666666666,\n \"acc_stderr\": 0.04089465449325582,\n \"acc_norm\": 0.6041666666666666,\n \"acc_norm_stderr\": 0.04089465449325582\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 
0.050251890762960605,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.050251890762960605\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.47,\n \"acc_stderr\": 0.050161355804659205,\n \"acc_norm\": 0.47,\n \"acc_norm_stderr\": 0.050161355804659205\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.046882617226215034,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.046882617226215034\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5086705202312138,\n \"acc_stderr\": 0.03811890988940412,\n \"acc_norm\": 0.5086705202312138,\n \"acc_norm_stderr\": 0.03811890988940412\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.30392156862745096,\n \"acc_stderr\": 0.045766654032077636,\n \"acc_norm\": 0.30392156862745096,\n \"acc_norm_stderr\": 0.045766654032077636\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.74,\n \"acc_stderr\": 0.04408440022768078,\n \"acc_norm\": 0.74,\n \"acc_norm_stderr\": 0.04408440022768078\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.425531914893617,\n \"acc_stderr\": 0.03232146916224467,\n \"acc_norm\": 0.425531914893617,\n \"acc_norm_stderr\": 0.03232146916224467\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.34210526315789475,\n \"acc_stderr\": 0.044629175353369376,\n \"acc_norm\": 0.34210526315789475,\n \"acc_norm_stderr\": 0.044629175353369376\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5103448275862069,\n \"acc_stderr\": 0.04165774775728763,\n \"acc_norm\": 0.5103448275862069,\n \"acc_norm_stderr\": 0.04165774775728763\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.32275132275132273,\n \"acc_stderr\": 0.024078943243597016,\n \"acc_norm\": 0.32275132275132273,\n \"acc_norm_stderr\": 0.024078943243597016\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.3333333333333333,\n \"acc_stderr\": 0.04216370213557835,\n \"acc_norm\": 0.3333333333333333,\n \"acc_norm_stderr\": 0.04216370213557835\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.6645161290322581,\n \"acc_stderr\": 0.026860206444724356,\n \"acc_norm\": 0.6645161290322581,\n \"acc_norm_stderr\": 0.026860206444724356\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.4187192118226601,\n \"acc_stderr\": 0.03471192860518468,\n \"acc_norm\": 0.4187192118226601,\n \"acc_norm_stderr\": 0.03471192860518468\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.61,\n \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\": 0.61,\n \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.703030303030303,\n \"acc_stderr\": 0.035679697722680495,\n \"acc_norm\": 0.703030303030303,\n \"acc_norm_stderr\": 0.035679697722680495\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7070707070707071,\n \"acc_stderr\": 0.03242497958178815,\n \"acc_norm\": 0.7070707070707071,\n \"acc_norm_stderr\": 0.03242497958178815\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.7616580310880829,\n \"acc_stderr\": 0.030748905363909878,\n \"acc_norm\": 0.7616580310880829,\n \"acc_norm_stderr\": 0.030748905363909878\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.5538461538461539,\n \"acc_stderr\": 0.02520357177302833,\n \"acc_norm\": 0.5538461538461539,\n \"acc_norm_stderr\": 0.02520357177302833\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3148148148148148,\n \"acc_stderr\": 0.028317533496066468,\n \"acc_norm\": 0.3148148148148148,\n \"acc_norm_stderr\": 0.028317533496066468\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.5672268907563025,\n \"acc_stderr\": 0.032183581077426124,\n \"acc_norm\": 0.5672268907563025,\n \"acc_norm_stderr\": 0.032183581077426124\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3576158940397351,\n \"acc_stderr\": 0.03913453431177258,\n \"acc_norm\": 0.3576158940397351,\n \"acc_norm_stderr\": 0.03913453431177258\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.710091743119266,\n \"acc_stderr\": 0.019453066609201597,\n \"acc_norm\": 0.710091743119266,\n \"acc_norm_stderr\": 0.019453066609201597\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.47685185185185186,\n \"acc_stderr\": 0.034063153607115065,\n \"acc_norm\": 0.47685185185185186,\n \"acc_norm_stderr\": 0.034063153607115065\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7107843137254902,\n \"acc_stderr\": 0.031822318676475544,\n \"acc_norm\": 0.7107843137254902,\n \"acc_norm_stderr\": 0.031822318676475544\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7721518987341772,\n \"acc_stderr\": 0.027303484599069422,\n \"acc_norm\": 0.7721518987341772,\n \"acc_norm_stderr\": 0.027303484599069422\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6636771300448431,\n \"acc_stderr\": 0.031708824268455,\n \"acc_norm\": 0.6636771300448431,\n \"acc_norm_stderr\": 0.031708824268455\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.6412213740458015,\n \"acc_stderr\": 0.04206739313864908,\n \"acc_norm\": 0.6412213740458015,\n \"acc_norm_stderr\": 0.04206739313864908\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.743801652892562,\n \"acc_stderr\": 0.03984979653302872,\n \"acc_norm\": 0.743801652892562,\n \"acc_norm_stderr\": 0.03984979653302872\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.6666666666666666,\n \"acc_stderr\": 0.04557239513497752,\n \"acc_norm\": 0.6666666666666666,\n \"acc_norm_stderr\": 0.04557239513497752\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7116564417177914,\n \"acc_stderr\": 0.03559039531617342,\n \"acc_norm\": 0.7116564417177914,\n \"acc_norm_stderr\": 0.03559039531617342\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.38392857142857145,\n \"acc_stderr\": 0.046161430750285476,\n \"acc_norm\": 0.38392857142857145,\n \"acc_norm_stderr\": 0.046161430750285476\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7766990291262136,\n \"acc_stderr\": 0.04123553189891431,\n \"acc_norm\": 0.7766990291262136,\n \"acc_norm_stderr\": 0.04123553189891431\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8376068376068376,\n \"acc_stderr\": 0.024161618127987745,\n \"acc_norm\": 0.8376068376068376,\n \"acc_norm_stderr\": 0.024161618127987745\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.56,\n \"acc_stderr\": 0.0498887651569859,\n \"acc_norm\": 0.56,\n \"acc_norm_stderr\": 0.0498887651569859\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.756066411238825,\n \"acc_stderr\": 0.015357212665829472,\n \"acc_norm\": 0.756066411238825,\n \"acc_norm_stderr\": 0.015357212665829472\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.6705202312138728,\n \"acc_stderr\": 0.025305258131879702,\n \"acc_norm\": 0.6705202312138728,\n \"acc_norm_stderr\": 0.025305258131879702\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.3865921787709497,\n \"acc_stderr\": 0.01628667487910102,\n \"acc_norm\": 0.3865921787709497,\n \"acc_norm_stderr\": 0.01628667487910102\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.6013071895424836,\n \"acc_stderr\": 0.028036092273891772,\n \"acc_norm\": 0.6013071895424836,\n \"acc_norm_stderr\": 0.028036092273891772\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6334405144694534,\n \"acc_stderr\": 0.02736807824397163,\n \"acc_norm\": 0.6334405144694534,\n \"acc_norm_stderr\": 0.02736807824397163\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.6327160493827161,\n \"acc_stderr\": 0.02682280175950789,\n \"acc_norm\": 0.6327160493827161,\n \"acc_norm_stderr\": 0.02682280175950789\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4219858156028369,\n \"acc_stderr\": 0.029462189233370597,\n \"acc_norm\": 0.4219858156028369,\n \"acc_norm_stderr\": 0.029462189233370597\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.42242503259452413,\n \"acc_stderr\": 0.012615600475734921,\n \"acc_norm\": 0.42242503259452413,\n \"acc_norm_stderr\": 0.012615600475734921\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.47794117647058826,\n \"acc_stderr\": 0.030343264224213535,\n \"acc_norm\": 0.47794117647058826,\n \"acc_norm_stderr\": 0.030343264224213535\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.5490196078431373,\n \"acc_stderr\": 0.020130388312904528,\n \"acc_norm\": 0.5490196078431373,\n \"acc_norm_stderr\": 0.020130388312904528\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6454545454545455,\n \"acc_stderr\": 0.045820048415054174,\n \"acc_norm\": 0.6454545454545455,\n \"acc_norm_stderr\": 0.045820048415054174\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.6571428571428571,\n \"acc_stderr\": 0.030387262919547728,\n \"acc_norm\": 0.6571428571428571,\n \"acc_norm_stderr\": 0.030387262919547728\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.7263681592039801,\n \"acc_stderr\": 0.031524391865554016,\n \"acc_norm\": 0.7263681592039801,\n \"acc_norm_stderr\": 0.031524391865554016\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.8,\n \"acc_stderr\": 0.04020151261036847,\n \"acc_norm\": 0.8,\n \"acc_norm_stderr\": 0.04020151261036847\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.42771084337349397,\n \"acc_stderr\": 0.038515976837185335,\n \"acc_norm\": 0.42771084337349397,\n \"acc_norm_stderr\": 0.038515976837185335\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8070175438596491,\n \"acc_stderr\": 0.030267457554898458,\n \"acc_norm\": 0.8070175438596491,\n \"acc_norm_stderr\": 0.030267457554898458\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.29253365973072215,\n \"mc1_stderr\": 0.015925597445286165,\n \"mc2\": 0.42324311782084495,\n \"mc2_stderr\": 0.014813023987866733\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.744277821625888,\n \"acc_stderr\": 0.012261253845440474\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.2812736921910538,\n \"acc_stderr\": 
0.012384789310940236\n }\n}\n```", "repo_url": "https://huggingface.co/pleisto/yuren-13b-chatml", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_02T14_25_58.778785", "path": ["**/details_harness|arc:challenge|25_2024-02-02T14-25-58.778785.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-02T14-25-58.778785.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_02T14_25_58.778785", "path": ["**/details_harness|gsm8k|5_2024-02-02T14-25-58.778785.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-02T14-25-58.778785.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_02T14_25_58.778785", "path": ["**/details_harness|hellaswag|10_2024-02-02T14-25-58.778785.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-02T14-25-58.778785.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_02T14_25_58.778785", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T14-25-58.778785.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T14-25-58.778785.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T14-25-58.778785.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T14-25-58.778785.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T14-25-58.778785.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T14-25-58.778785.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_02T14_25_58.778785", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T14-25-58.778785.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T14-25-58.778785.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_02T14_25_58.778785", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T14-25-58.778785.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T14-25-58.778785.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_02T14_25_58.778785", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T14-25-58.778785.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T14-25-58.778785.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_02T14_25_58.778785", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T14-25-58.778785.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T14-25-58.778785.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_02T14_25_58.778785", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T14-25-58.778785.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T14-25-58.778785.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_02T14_25_58.778785", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T14-25-58.778785.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T14-25-58.778785.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_02T14_25_58.778785", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T14-25-58.778785.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T14-25-58.778785.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_02_02T14_25_58.778785", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T14-25-58.778785.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T14-25-58.778785.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_02T14_25_58.778785", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T14-25-58.778785.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T14-25-58.778785.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_02T14_25_58.778785", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T14-25-58.778785.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T14-25-58.778785.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_02T14_25_58.778785", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T14-25-58.778785.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T14-25-58.778785.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_02T14_25_58.778785", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T14-25-58.778785.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T14-25-58.778785.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_02T14_25_58.778785", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T14-25-58.778785.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T14-25-58.778785.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_02T14_25_58.778785", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T14-25-58.778785.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T14-25-58.778785.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_02T14_25_58.778785", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T14-25-58.778785.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T14-25-58.778785.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_02T14_25_58.778785", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T14-25-58.778785.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T14-25-58.778785.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_02T14_25_58.778785", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T14-25-58.778785.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T14-25-58.778785.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_02T14_25_58.778785", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T14-25-58.778785.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T14-25-58.778785.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_02T14_25_58.778785", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T14-25-58.778785.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T14-25-58.778785.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_02T14_25_58.778785", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T14-25-58.778785.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T14-25-58.778785.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_02T14_25_58.778785", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T14-25-58.778785.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T14-25-58.778785.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_02T14_25_58.778785", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T14-25-58.778785.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T14-25-58.778785.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_02T14_25_58.778785", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T14-25-58.778785.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T14-25-58.778785.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_02T14_25_58.778785", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T14-25-58.778785.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T14-25-58.778785.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_02T14_25_58.778785", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T14-25-58.778785.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T14-25-58.778785.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_02T14_25_58.778785", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T14-25-58.778785.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T14-25-58.778785.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_02T14_25_58.778785", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T14-25-58.778785.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T14-25-58.778785.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_02T14_25_58.778785", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T14-25-58.778785.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T14-25-58.778785.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_02T14_25_58.778785", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T14-25-58.778785.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T14-25-58.778785.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_02T14_25_58.778785", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T14-25-58.778785.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T14-25-58.778785.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_02T14_25_58.778785", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T14-25-58.778785.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T14-25-58.778785.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_02T14_25_58.778785", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T14-25-58.778785.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T14-25-58.778785.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_02T14_25_58.778785", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T14-25-58.778785.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T14-25-58.778785.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_02T14_25_58.778785", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T14-25-58.778785.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T14-25-58.778785.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_02T14_25_58.778785", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T14-25-58.778785.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T14-25-58.778785.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_02T14_25_58.778785", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T14-25-58.778785.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T14-25-58.778785.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_02T14_25_58.778785", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T14-25-58.778785.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T14-25-58.778785.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_02_02T14_25_58.778785", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T14-25-58.778785.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T14-25-58.778785.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_02T14_25_58.778785", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T14-25-58.778785.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T14-25-58.778785.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_02T14_25_58.778785", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T14-25-58.778785.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T14-25-58.778785.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_02T14_25_58.778785", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T14-25-58.778785.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T14-25-58.778785.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_02T14_25_58.778785", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T14-25-58.778785.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T14-25-58.778785.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_02T14_25_58.778785", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T14-25-58.778785.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T14-25-58.778785.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_02T14_25_58.778785", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T14-25-58.778785.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T14-25-58.778785.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_02T14_25_58.778785", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T14-25-58.778785.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T14-25-58.778785.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_02T14_25_58.778785", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T14-25-58.778785.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T14-25-58.778785.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_02T14_25_58.778785", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T14-25-58.778785.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T14-25-58.778785.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_02T14_25_58.778785", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T14-25-58.778785.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T14-25-58.778785.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_02T14_25_58.778785", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T14-25-58.778785.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T14-25-58.778785.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_02T14_25_58.778785", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T14-25-58.778785.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T14-25-58.778785.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_02T14_25_58.778785", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T14-25-58.778785.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T14-25-58.778785.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_02T14_25_58.778785", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T14-25-58.778785.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T14-25-58.778785.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_02T14_25_58.778785", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T14-25-58.778785.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T14-25-58.778785.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_02T14_25_58.778785", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T14-25-58.778785.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T14-25-58.778785.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_02T14_25_58.778785", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T14-25-58.778785.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T14-25-58.778785.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_02T14_25_58.778785", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T14-25-58.778785.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T14-25-58.778785.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_02T14_25_58.778785", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T14-25-58.778785.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T14-25-58.778785.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_02T14_25_58.778785", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T14-25-58.778785.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T14-25-58.778785.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_02T14_25_58.778785", "path": ["**/details_harness|winogrande|5_2024-02-02T14-25-58.778785.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-02-02T14-25-58.778785.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_02_02T14_25_58.778785", "path": ["results_2024-02-02T14-25-58.778785.parquet"]}, {"split": "latest", "path": ["results_2024-02-02T14-25-58.778785.parquet"]}]}]}
2024-02-02T14:28:43+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of pleisto/yuren-13b-chatml Dataset automatically created during the evaluation run of model pleisto/yuren-13b-chatml on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-02-02T14:25:58.778785 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
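The loading call referenced above ("To load the details from a run...") is a standard `datasets` call; a minimal sketch, assuming the run details live in a repository following the `details_<org>__<model>` naming pattern used by these evaluation datasets, and using the `harness_winogrande_5` configuration listed in the metadata above:

```python
from datasets import load_dataset

# Load the per-example Winogrande details for this run; the "train" split
# always points at the latest results, per the card text above.
data = load_dataset("open-llm-leaderboard/details_pleisto__yuren-13b-chatml",
                    "harness_winogrande_5",
                    split="train")
```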
[ "# Dataset Card for Evaluation run of pleisto/yuren-13b-chatml\n\n\n\nDataset automatically created during the evaluation run of model pleisto/yuren-13b-chatml on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T14:25:58.778785(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of pleisto/yuren-13b-chatml\n\n\n\nDataset automatically created during the evaluation run of model pleisto/yuren-13b-chatml on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T14:25:58.778785(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
15f5dd9096a7ce58eac7724e848cda602355d74b
# Dataset Card for "Capybara" ## Dataset Description - **Repository: https://github.com/AISE-TUDelft/Capybara-BinT5** - **Paper: https://huggingface.co/papers/2301.01701** - **Point of Contact: https://huggingface.co/aalkaswan** - **Raw Data: https://zenodo.org/records/7229809** ### Dataset Summary Dataset used to train [BinT5](https://huggingface.co/collections/AISE-TUDelft/bint5-65bd006a8c90bd5c97485244). Please refer to the paper for more information. ### Citation Information ``` @inproceedings{alkaswan2023extending, title={Extending Source Code Pre-Trained Language Models to Summarise Decompiled Binaries}, author={Al-Kaswan, Ali and Ahmed, Toufique and Izadi, Maliheh and Sawant, Anand Ashok and Devanbu, Premkumar and van Deursen, Arie}, booktitle={2023 IEEE International Conference on Software Analysis, Evolution and Reengineering (SANER)}, pages={260--271}, year={2023}, organization={IEEE} } ```
AISE-TUDelft/Capybara
[ "task_categories:summarization", "size_categories:100K<n<1M", "license:apache-2.0", "code", "Reverse Engineering", "Binary", "Code Summarization", "arxiv:2301.01701", "region:us" ]
2024-02-02T14:35:23+00:00
{"license": "apache-2.0", "size_categories": ["100K<n<1M"], "task_categories": ["summarization"], "configs": [{"config_name": "default", "data_files": [{"split": "dedup_C", "path": "data/dedup_C-*"}, {"split": "dup_C", "path": "data/dup_C-*"}, {"split": "dedup_DecomC", "path": "data/dedup_DecomC-*"}, {"split": "dup_DecomC", "path": "data/dup_DecomC-*"}, {"split": "dedup_demiStripped", "path": "data/dedup_demiStripped-*"}, {"split": "dup_demiStripped", "path": "data/dup_demiStripped-*"}, {"split": "no_fun_demiStripped", "path": "data/no_fun_demiStripped-*"}, {"split": "dup_stripped", "path": "data/dup_stripped-*"}, {"split": "dedup_stripped", "path": "data/dedup_stripped-*"}]}], "dataset_info": {"features": [{"name": "id", "dtype": "int64"}, {"name": "docstring_tokens", "sequence": "string"}, {"name": "code_tokens", "sequence": "string"}, {"name": "fun_name", "dtype": "string"}, {"name": "repo", "dtype": "string"}, {"name": "starting", "dtype": "string"}, {"name": "partition", "dtype": "string"}, {"name": "__index_level_0__", "dtype": "int64"}], "splits": [{"name": "dedup_C", "num_bytes": 167770495, "num_examples": 79673}, {"name": "dup_C", "num_bytes": 348707539, "num_examples": 214587}, {"name": "dedup_DecomC", "num_bytes": 330052224, "num_examples": 79673}, {"name": "dup_DecomC", "num_bytes": 614158883, "num_examples": 214587}, {"name": "dedup_demiStripped", "num_bytes": 316991021, "num_examples": 79673}, {"name": "dup_demiStripped", "num_bytes": 590234671, "num_examples": 214587}, {"name": "no_fun_demiStripped", "num_bytes": 606914210, "num_examples": 214587}, {"name": "dup_stripped", "num_bytes": 60563000, "num_examples": 14245}, {"name": "dedup_stripped", "num_bytes": 40485701, "num_examples": 7826}], "download_size": 592873091, "dataset_size": 3075877744}, "tags": ["code", "Reverse Engineering", "Binary", "Code Summarization"]}
2024-02-02T15:03:16+00:00
[ "2301.01701" ]
[]
TAGS #task_categories-summarization #size_categories-100K<n<1M #license-apache-2.0 #code #Reverse Engineering #Binary #Code Summarization #arxiv-2301.01701 #region-us
# Dataset Card for "Capybara" ## Dataset Description - Repository: URL - Paper: URL - Point of Contact: URL - Raw Data: URL ### Dataset Summary Dataset used to train BinT5. Please refer to the paper for more information.
[ "# Dataset Card for \"Capybara\"", "## Dataset Description\n\n- Repository: URL\n- Paper: URL\n- Point of Contact: URL\n- Raw Data: URL", "### Dataset Summary\n\nDataset used to train BinT5. Please refer to the paper for more information." ]
[ "TAGS\n#task_categories-summarization #size_categories-100K<n<1M #license-apache-2.0 #code #Reverse Engineering #Binary #Code Summarization #arxiv-2301.01701 #region-us \n", "# Dataset Card for \"Capybara\"", "## Dataset Description\n\n- Repository: URL\n- Paper: URL\n- Point of Contact: URL\n- Raw Data: URL", "### Dataset Summary\n\nDataset used to train BinT5. Please refer to the paper for more information." ]
a1a5633046033760bf0aeb2fadafbf8f58911743
# genius_french_rap_corpus
regicid/La_france_au_rap_francais
[ "region:us" ]
2024-02-02T14:50:08+00:00
{}
2024-02-02T15:01:50+00:00
[]
[]
TAGS #region-us
# genius_french_rap_corpus
[ "# genius_french_rap_corpus" ]
[ "TAGS\n#region-us \n", "# genius_french_rap_corpus" ]
5beda5327ff557279db3e700b3fe342b43e37903
# Dataset Card for Evaluation run of Evaloric/Evaloric-1.1B <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [Evaloric/Evaloric-1.1B](https://huggingface.co/Evaloric/Evaloric-1.1B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_Evaloric__Evaloric-1.1B", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-02-02T15:22:05.810846](https://huggingface.co/datasets/open-llm-leaderboard/details_Evaloric__Evaloric-1.1B/blob/main/results_2024-02-02T15-22-05.810846.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.26094744374650203, "acc_stderr": 0.03084373131854147, "acc_norm": 0.2612418177103961, "acc_norm_stderr": 0.03161207546907615, "mc1": 0.23378212974296206, "mc1_stderr": 0.014816195991931586, "mc2": 0.3778468793879615, "mc2_stderr": 0.0139393481480023 }, "harness|arc:challenge|25": { "acc": 0.34215017064846415, "acc_stderr": 0.01386415215917728, "acc_norm": 0.3506825938566553, "acc_norm_stderr": 0.013944635930726087 }, "harness|hellaswag|10": { "acc": 0.4614618601872137, "acc_stderr": 0.004974937803907464, "acc_norm": 0.6093407687711612, "acc_norm_stderr": 0.004869010152280748 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.24, "acc_stderr": 0.04292346959909284, "acc_norm": 0.24, "acc_norm_stderr": 0.04292346959909284 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.14814814814814814, "acc_stderr": 0.03068864761035268, "acc_norm": 0.14814814814814814, "acc_norm_stderr": 0.03068864761035268 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.17105263157894737, "acc_stderr": 0.030643607071677077, "acc_norm": 0.17105263157894737, "acc_norm_stderr": 0.030643607071677077 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.24, "acc_stderr": 0.042923469599092816, "acc_norm": 0.24, "acc_norm_stderr": 0.042923469599092816 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.27169811320754716, "acc_stderr": 0.027377706624670713, "acc_norm": 0.27169811320754716, "acc_norm_stderr": 0.027377706624670713 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.2222222222222222, "acc_stderr": 0.03476590104304134, "acc_norm": 0.2222222222222222, "acc_norm_stderr": 0.03476590104304134 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.24, "acc_stderr": 0.04292346959909283, "acc_norm": 0.24, "acc_norm_stderr": 0.04292346959909283 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.27, "acc_stderr": 0.0446196043338474, "acc_norm": 0.27, "acc_norm_stderr": 0.0446196043338474 }, 
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.35, "acc_stderr": 0.0479372485441102, "acc_norm": 0.35, "acc_norm_stderr": 0.0479372485441102 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.2023121387283237, "acc_stderr": 0.030631145539198826, "acc_norm": 0.2023121387283237, "acc_norm_stderr": 0.030631145539198826 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.19607843137254902, "acc_stderr": 0.03950581861179961, "acc_norm": 0.19607843137254902, "acc_norm_stderr": 0.03950581861179961 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.2936170212765957, "acc_stderr": 0.029771642712491227, "acc_norm": 0.2936170212765957, "acc_norm_stderr": 0.029771642712491227 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.2543859649122807, "acc_stderr": 0.040969851398436716, "acc_norm": 0.2543859649122807, "acc_norm_stderr": 0.040969851398436716 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.22758620689655173, "acc_stderr": 0.03493950380131183, "acc_norm": 0.22758620689655173, "acc_norm_stderr": 0.03493950380131183 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.26455026455026454, "acc_stderr": 0.022717467897708614, "acc_norm": 0.26455026455026454, "acc_norm_stderr": 0.022717467897708614 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.1746031746031746, "acc_stderr": 0.033954900208561116, "acc_norm": 0.1746031746031746, "acc_norm_stderr": 0.033954900208561116 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.24193548387096775, "acc_stderr": 0.024362599693031086, "acc_norm": 0.24193548387096775, "acc_norm_stderr": 0.024362599693031086 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.270935960591133, "acc_stderr": 0.031270907132976984, "acc_norm": 0.270935960591133, "acc_norm_stderr": 0.031270907132976984 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.23, "acc_stderr": 0.04229525846816505, "acc_norm": 0.23, "acc_norm_stderr": 0.04229525846816505 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.26666666666666666, "acc_stderr": 0.03453131801885415, "acc_norm": 0.26666666666666666, "acc_norm_stderr": 0.03453131801885415 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.22727272727272727, "acc_stderr": 0.02985751567338641, "acc_norm": 0.22727272727272727, "acc_norm_stderr": 0.02985751567338641 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.21761658031088082, "acc_stderr": 0.029778663037752954, "acc_norm": 0.21761658031088082, "acc_norm_stderr": 0.029778663037752954 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.24102564102564103, "acc_stderr": 0.02168554666533319, "acc_norm": 0.24102564102564103, "acc_norm_stderr": 0.02168554666533319 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.26666666666666666, "acc_stderr": 0.026962424325073845, "acc_norm": 0.26666666666666666, "acc_norm_stderr": 0.026962424325073845 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.24789915966386555, "acc_stderr": 0.028047967224176896, "acc_norm": 0.24789915966386555, "acc_norm_stderr": 0.028047967224176896 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.23841059602649006, 
"acc_stderr": 0.034791855725996586, "acc_norm": 0.23841059602649006, "acc_norm_stderr": 0.034791855725996586 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.23853211009174313, "acc_stderr": 0.01827257581023187, "acc_norm": 0.23853211009174313, "acc_norm_stderr": 0.01827257581023187 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.39351851851851855, "acc_stderr": 0.03331747876370312, "acc_norm": 0.39351851851851855, "acc_norm_stderr": 0.03331747876370312 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.2549019607843137, "acc_stderr": 0.030587591351604257, "acc_norm": 0.2549019607843137, "acc_norm_stderr": 0.030587591351604257 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.27848101265822783, "acc_stderr": 0.029178682304842538, "acc_norm": 0.27848101265822783, "acc_norm_stderr": 0.029178682304842538 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.36771300448430494, "acc_stderr": 0.03236198350928275, "acc_norm": 0.36771300448430494, "acc_norm_stderr": 0.03236198350928275 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.2366412213740458, "acc_stderr": 0.037276735755969195, "acc_norm": 0.2366412213740458, "acc_norm_stderr": 0.037276735755969195 }, "harness|hendrycksTest-international_law|5": { "acc": 0.24793388429752067, "acc_stderr": 0.039418975265163025, "acc_norm": 0.24793388429752067, "acc_norm_stderr": 0.039418975265163025 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.2222222222222222, "acc_stderr": 0.0401910747255735, "acc_norm": 0.2222222222222222, "acc_norm_stderr": 0.0401910747255735 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.26380368098159507, "acc_stderr": 0.034624199316156234, "acc_norm": 0.26380368098159507, "acc_norm_stderr": 0.034624199316156234 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.29464285714285715, "acc_stderr": 0.04327040932578728, "acc_norm": 0.29464285714285715, "acc_norm_stderr": 0.04327040932578728 }, "harness|hendrycksTest-management|5": { "acc": 0.2621359223300971, "acc_stderr": 0.04354631077260597, "acc_norm": 0.2621359223300971, "acc_norm_stderr": 0.04354631077260597 }, "harness|hendrycksTest-marketing|5": { "acc": 0.2692307692307692, "acc_stderr": 0.029058588303748842, "acc_norm": 0.2692307692307692, "acc_norm_stderr": 0.029058588303748842 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.26, "acc_stderr": 0.044084400227680794, "acc_norm": 0.26, "acc_norm_stderr": 0.044084400227680794 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.28607918263090676, "acc_stderr": 0.016160871405127526, "acc_norm": 0.28607918263090676, "acc_norm_stderr": 0.016160871405127526 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.2254335260115607, "acc_stderr": 0.02249723019096755, "acc_norm": 0.2254335260115607, "acc_norm_stderr": 0.02249723019096755 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.22681564245810057, "acc_stderr": 0.014005843570897899, "acc_norm": 0.22681564245810057, "acc_norm_stderr": 0.014005843570897899 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.238562091503268, "acc_stderr": 0.02440439492808787, "acc_norm": 0.238562091503268, "acc_norm_stderr": 0.02440439492808787 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.2733118971061093, "acc_stderr": 0.02531176597542612, "acc_norm": 0.2733118971061093, "acc_norm_stderr": 0.02531176597542612 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.2623456790123457, "acc_stderr": 0.02447722285613511, "acc_norm": 0.2623456790123457, "acc_norm_stderr": 0.02447722285613511 
}, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.2765957446808511, "acc_stderr": 0.026684564340460997, "acc_norm": 0.2765957446808511, "acc_norm_stderr": 0.026684564340460997 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.23728813559322035, "acc_stderr": 0.010865436690780278, "acc_norm": 0.23728813559322035, "acc_norm_stderr": 0.010865436690780278 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.21323529411764705, "acc_stderr": 0.024880971512294268, "acc_norm": 0.21323529411764705, "acc_norm_stderr": 0.024880971512294268 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.2647058823529412, "acc_stderr": 0.017848089574913226, "acc_norm": 0.2647058823529412, "acc_norm_stderr": 0.017848089574913226 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.34545454545454546, "acc_stderr": 0.04554619617541054, "acc_norm": 0.34545454545454546, "acc_norm_stderr": 0.04554619617541054 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.15510204081632653, "acc_stderr": 0.0231747988612186, "acc_norm": 0.15510204081632653, "acc_norm_stderr": 0.0231747988612186 }, "harness|hendrycksTest-sociology|5": { "acc": 0.23880597014925373, "acc_stderr": 0.030147775935409224, "acc_norm": 0.23880597014925373, "acc_norm_stderr": 0.030147775935409224 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.24, "acc_stderr": 0.04292346959909282, "acc_norm": 0.24, "acc_norm_stderr": 0.04292346959909282 }, "harness|hendrycksTest-virology|5": { "acc": 0.3253012048192771, "acc_stderr": 0.03647168523683227, "acc_norm": 0.3253012048192771, "acc_norm_stderr": 0.03647168523683227 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.23976608187134502, "acc_stderr": 0.03274485211946956, "acc_norm": 0.23976608187134502, "acc_norm_stderr": 0.03274485211946956 }, "harness|truthfulqa:mc|0": { "mc1": 0.23378212974296206, "mc1_stderr": 0.014816195991931586, "mc2": 0.3778468793879615, "mc2_stderr": 0.0139393481480023 }, "harness|winogrande|5": { "acc": 0.6495659037095501, "acc_stderr": 0.013409047676670184 }, "harness|gsm8k|5": { "acc": 0.011372251705837756, "acc_stderr": 0.00292066619878875 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
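Beyond the per-task detail configurations, the aggregated metrics shown under "Latest results" can also be loaded directly from the "results" configuration; a short sketch, assuming the same split layout ("latest" plus timestamped splits) as the per-task configurations:

```python
from datasets import load_dataset

# The "results" configuration stores the aggregated metrics of the run;
# the "latest" split always points at the most recent evaluation.
results = load_dataset("open-llm-leaderboard/details_Evaloric__Evaloric-1.1B",
                       "results",
                       split="latest")
```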
open-llm-leaderboard/details_Evaloric__Evaloric-1.1B
[ "region:us" ]
2024-02-02T15:23:53+00:00
{"pretty_name": "Evaluation run of Evaloric/Evaloric-1.1B", "dataset_summary": "Dataset automatically created during the evaluation run of model [Evaloric/Evaloric-1.1B](https://huggingface.co/Evaloric/Evaloric-1.1B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Evaloric__Evaloric-1.1B\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-02T15:22:05.810846](https://huggingface.co/datasets/open-llm-leaderboard/details_Evaloric__Evaloric-1.1B/blob/main/results_2024-02-02T15-22-05.810846.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.26094744374650203,\n \"acc_stderr\": 0.03084373131854147,\n \"acc_norm\": 0.2612418177103961,\n \"acc_norm_stderr\": 0.03161207546907615,\n \"mc1\": 0.23378212974296206,\n \"mc1_stderr\": 0.014816195991931586,\n \"mc2\": 0.3778468793879615,\n \"mc2_stderr\": 0.0139393481480023\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.34215017064846415,\n \"acc_stderr\": 0.01386415215917728,\n \"acc_norm\": 0.3506825938566553,\n \"acc_norm_stderr\": 0.013944635930726087\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.4614618601872137,\n \"acc_stderr\": 0.004974937803907464,\n \"acc_norm\": 0.6093407687711612,\n \"acc_norm_stderr\": 0.004869010152280748\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.24,\n \"acc_stderr\": 0.04292346959909284,\n \"acc_norm\": 0.24,\n \"acc_norm_stderr\": 0.04292346959909284\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.14814814814814814,\n \"acc_stderr\": 0.03068864761035268,\n \"acc_norm\": 0.14814814814814814,\n \"acc_norm_stderr\": 0.03068864761035268\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.17105263157894737,\n \"acc_stderr\": 0.030643607071677077,\n \"acc_norm\": 0.17105263157894737,\n \"acc_norm_stderr\": 0.030643607071677077\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.24,\n \"acc_stderr\": 0.042923469599092816,\n \"acc_norm\": 0.24,\n \"acc_norm_stderr\": 0.042923469599092816\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.27169811320754716,\n \"acc_stderr\": 0.027377706624670713,\n \"acc_norm\": 0.27169811320754716,\n \"acc_norm_stderr\": 0.027377706624670713\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.2222222222222222,\n \"acc_stderr\": 0.03476590104304134,\n \"acc_norm\": 0.2222222222222222,\n \"acc_norm_stderr\": 0.03476590104304134\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.24,\n \"acc_stderr\": 
0.04292346959909283,\n \"acc_norm\": 0.24,\n \"acc_norm_stderr\": 0.04292346959909283\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.27,\n \"acc_stderr\": 0.0446196043338474,\n \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.0446196043338474\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.0479372485441102,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.2023121387283237,\n \"acc_stderr\": 0.030631145539198826,\n \"acc_norm\": 0.2023121387283237,\n \"acc_norm_stderr\": 0.030631145539198826\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.19607843137254902,\n \"acc_stderr\": 0.03950581861179961,\n \"acc_norm\": 0.19607843137254902,\n \"acc_norm_stderr\": 0.03950581861179961\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.2936170212765957,\n \"acc_stderr\": 0.029771642712491227,\n \"acc_norm\": 0.2936170212765957,\n \"acc_norm_stderr\": 0.029771642712491227\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2543859649122807,\n \"acc_stderr\": 0.040969851398436716,\n \"acc_norm\": 0.2543859649122807,\n \"acc_norm_stderr\": 0.040969851398436716\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.22758620689655173,\n \"acc_stderr\": 0.03493950380131183,\n \"acc_norm\": 0.22758620689655173,\n \"acc_norm_stderr\": 0.03493950380131183\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.26455026455026454,\n \"acc_stderr\": 0.022717467897708614,\n \"acc_norm\": 0.26455026455026454,\n \"acc_norm_stderr\": 0.022717467897708614\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.1746031746031746,\n \"acc_stderr\": 0.033954900208561116,\n \"acc_norm\": 0.1746031746031746,\n \"acc_norm_stderr\": 0.033954900208561116\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.24193548387096775,\n \"acc_stderr\": 0.024362599693031086,\n \"acc_norm\": 0.24193548387096775,\n \"acc_norm_stderr\": 0.024362599693031086\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.270935960591133,\n \"acc_stderr\": 0.031270907132976984,\n \"acc_norm\": 0.270935960591133,\n \"acc_norm_stderr\": 0.031270907132976984\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.23,\n \"acc_stderr\": 0.04229525846816505,\n \"acc_norm\": 0.23,\n \"acc_norm_stderr\": 0.04229525846816505\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.26666666666666666,\n \"acc_stderr\": 0.03453131801885415,\n \"acc_norm\": 0.26666666666666666,\n \"acc_norm_stderr\": 0.03453131801885415\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.22727272727272727,\n \"acc_stderr\": 0.02985751567338641,\n \"acc_norm\": 0.22727272727272727,\n \"acc_norm_stderr\": 0.02985751567338641\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.21761658031088082,\n \"acc_stderr\": 0.029778663037752954,\n \"acc_norm\": 0.21761658031088082,\n \"acc_norm_stderr\": 0.029778663037752954\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.24102564102564103,\n \"acc_stderr\": 0.02168554666533319,\n \"acc_norm\": 0.24102564102564103,\n \"acc_norm_stderr\": 0.02168554666533319\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.26666666666666666,\n \"acc_stderr\": 0.026962424325073845,\n \"acc_norm\": 0.26666666666666666,\n \"acc_norm_stderr\": 0.026962424325073845\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.24789915966386555,\n \"acc_stderr\": 0.028047967224176896,\n \"acc_norm\": 0.24789915966386555,\n \"acc_norm_stderr\": 0.028047967224176896\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.23841059602649006,\n \"acc_stderr\": 0.034791855725996586,\n \"acc_norm\": 0.23841059602649006,\n \"acc_norm_stderr\": 0.034791855725996586\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.23853211009174313,\n \"acc_stderr\": 0.01827257581023187,\n \"acc_norm\": 0.23853211009174313,\n \"acc_norm_stderr\": 0.01827257581023187\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.39351851851851855,\n \"acc_stderr\": 0.03331747876370312,\n \"acc_norm\": 0.39351851851851855,\n \"acc_norm_stderr\": 0.03331747876370312\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.2549019607843137,\n \"acc_stderr\": 0.030587591351604257,\n \"acc_norm\": 0.2549019607843137,\n \"acc_norm_stderr\": 0.030587591351604257\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.27848101265822783,\n \"acc_stderr\": 0.029178682304842538,\n \"acc_norm\": 0.27848101265822783,\n \"acc_norm_stderr\": 0.029178682304842538\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.36771300448430494,\n \"acc_stderr\": 0.03236198350928275,\n \"acc_norm\": 0.36771300448430494,\n \"acc_norm_stderr\": 0.03236198350928275\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.2366412213740458,\n \"acc_stderr\": 0.037276735755969195,\n \"acc_norm\": 0.2366412213740458,\n \"acc_norm_stderr\": 0.037276735755969195\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.24793388429752067,\n \"acc_stderr\": 0.039418975265163025,\n \"acc_norm\": 0.24793388429752067,\n \"acc_norm_stderr\": 0.039418975265163025\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.2222222222222222,\n \"acc_stderr\": 0.0401910747255735,\n \"acc_norm\": 0.2222222222222222,\n \"acc_norm_stderr\": 0.0401910747255735\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.26380368098159507,\n \"acc_stderr\": 0.034624199316156234,\n \"acc_norm\": 0.26380368098159507,\n \"acc_norm_stderr\": 0.034624199316156234\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.29464285714285715,\n \"acc_stderr\": 0.04327040932578728,\n \"acc_norm\": 0.29464285714285715,\n \"acc_norm_stderr\": 0.04327040932578728\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.2621359223300971,\n \"acc_stderr\": 0.04354631077260597,\n \"acc_norm\": 0.2621359223300971,\n \"acc_norm_stderr\": 0.04354631077260597\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.2692307692307692,\n \"acc_stderr\": 0.029058588303748842,\n \"acc_norm\": 0.2692307692307692,\n \"acc_norm_stderr\": 0.029058588303748842\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.26,\n \"acc_stderr\": 0.044084400227680794,\n \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.044084400227680794\n },\n 
\"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.28607918263090676,\n \"acc_stderr\": 0.016160871405127526,\n \"acc_norm\": 0.28607918263090676,\n \"acc_norm_stderr\": 0.016160871405127526\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.2254335260115607,\n \"acc_stderr\": 0.02249723019096755,\n \"acc_norm\": 0.2254335260115607,\n \"acc_norm_stderr\": 0.02249723019096755\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.22681564245810057,\n \"acc_stderr\": 0.014005843570897899,\n \"acc_norm\": 0.22681564245810057,\n \"acc_norm_stderr\": 0.014005843570897899\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.238562091503268,\n \"acc_stderr\": 0.02440439492808787,\n \"acc_norm\": 0.238562091503268,\n \"acc_norm_stderr\": 0.02440439492808787\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.2733118971061093,\n \"acc_stderr\": 0.02531176597542612,\n \"acc_norm\": 0.2733118971061093,\n \"acc_norm_stderr\": 0.02531176597542612\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.2623456790123457,\n \"acc_stderr\": 0.02447722285613511,\n \"acc_norm\": 0.2623456790123457,\n \"acc_norm_stderr\": 0.02447722285613511\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.2765957446808511,\n \"acc_stderr\": 0.026684564340460997,\n \"acc_norm\": 0.2765957446808511,\n \"acc_norm_stderr\": 0.026684564340460997\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.23728813559322035,\n \"acc_stderr\": 0.010865436690780278,\n \"acc_norm\": 0.23728813559322035,\n \"acc_norm_stderr\": 0.010865436690780278\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.21323529411764705,\n \"acc_stderr\": 0.024880971512294268,\n \"acc_norm\": 0.21323529411764705,\n \"acc_norm_stderr\": 0.024880971512294268\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.2647058823529412,\n \"acc_stderr\": 0.017848089574913226,\n \"acc_norm\": 0.2647058823529412,\n \"acc_norm_stderr\": 0.017848089574913226\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.34545454545454546,\n \"acc_stderr\": 0.04554619617541054,\n \"acc_norm\": 0.34545454545454546,\n \"acc_norm_stderr\": 0.04554619617541054\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.15510204081632653,\n \"acc_stderr\": 0.0231747988612186,\n \"acc_norm\": 0.15510204081632653,\n \"acc_norm_stderr\": 0.0231747988612186\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.23880597014925373,\n \"acc_stderr\": 0.030147775935409224,\n \"acc_norm\": 0.23880597014925373,\n \"acc_norm_stderr\": 0.030147775935409224\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.24,\n \"acc_stderr\": 0.04292346959909282,\n \"acc_norm\": 0.24,\n \"acc_norm_stderr\": 0.04292346959909282\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.3253012048192771,\n \"acc_stderr\": 0.03647168523683227,\n \"acc_norm\": 0.3253012048192771,\n \"acc_norm_stderr\": 0.03647168523683227\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.23976608187134502,\n \"acc_stderr\": 0.03274485211946956,\n \"acc_norm\": 0.23976608187134502,\n \"acc_norm_stderr\": 0.03274485211946956\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.23378212974296206,\n \"mc1_stderr\": 0.014816195991931586,\n \"mc2\": 0.3778468793879615,\n \"mc2_stderr\": 0.0139393481480023\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.6495659037095501,\n \"acc_stderr\": 0.013409047676670184\n },\n \"harness|gsm8k|5\": {\n 
\"acc\": 0.011372251705837756,\n \"acc_stderr\": 0.00292066619878875\n }\n}\n```", "repo_url": "https://huggingface.co/Evaloric/Evaloric-1.1B", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_02T15_22_05.810846", "path": ["**/details_harness|arc:challenge|25_2024-02-02T15-22-05.810846.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-02T15-22-05.810846.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_02T15_22_05.810846", "path": ["**/details_harness|gsm8k|5_2024-02-02T15-22-05.810846.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-02T15-22-05.810846.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_02T15_22_05.810846", "path": ["**/details_harness|hellaswag|10_2024-02-02T15-22-05.810846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-02T15-22-05.810846.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_02T15_22_05.810846", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T15-22-05.810846.parquet", 
"**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T15-22-05.810846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T15-22-05.810846.parquet", 
"**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T15-22-05.810846.parquet", 
"**/details_harness|hendrycksTest-nutrition|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T15-22-05.810846.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T15-22-05.810846.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_02T15_22_05.810846", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T15-22-05.810846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T15-22-05.810846.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_02T15_22_05.810846", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T15-22-05.810846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T15-22-05.810846.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_02T15_22_05.810846", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T15-22-05.810846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T15-22-05.810846.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_02T15_22_05.810846", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T15-22-05.810846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T15-22-05.810846.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_02T15_22_05.810846", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T15-22-05.810846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T15-22-05.810846.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_02T15_22_05.810846", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T15-22-05.810846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T15-22-05.810846.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_02T15_22_05.810846", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T15-22-05.810846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T15-22-05.810846.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", 
"data_files": [{"split": "2024_02_02T15_22_05.810846", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T15-22-05.810846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T15-22-05.810846.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_02T15_22_05.810846", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T15-22-05.810846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T15-22-05.810846.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_02T15_22_05.810846", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T15-22-05.810846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T15-22-05.810846.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_02T15_22_05.810846", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T15-22-05.810846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T15-22-05.810846.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_02T15_22_05.810846", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T15-22-05.810846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T15-22-05.810846.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_02T15_22_05.810846", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T15-22-05.810846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T15-22-05.810846.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_02T15_22_05.810846", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T15-22-05.810846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T15-22-05.810846.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_02T15_22_05.810846", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T15-22-05.810846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T15-22-05.810846.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_02T15_22_05.810846", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T15-22-05.810846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T15-22-05.810846.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_02T15_22_05.810846", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T15-22-05.810846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T15-22-05.810846.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_02T15_22_05.810846", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T15-22-05.810846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T15-22-05.810846.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_02T15_22_05.810846", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T15-22-05.810846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T15-22-05.810846.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_02T15_22_05.810846", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T15-22-05.810846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T15-22-05.810846.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_02T15_22_05.810846", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T15-22-05.810846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T15-22-05.810846.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_02T15_22_05.810846", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T15-22-05.810846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T15-22-05.810846.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_02T15_22_05.810846", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T15-22-05.810846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T15-22-05.810846.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_02T15_22_05.810846", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T15-22-05.810846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T15-22-05.810846.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_02T15_22_05.810846", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T15-22-05.810846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T15-22-05.810846.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_02T15_22_05.810846", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T15-22-05.810846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T15-22-05.810846.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_02T15_22_05.810846", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T15-22-05.810846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T15-22-05.810846.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_02T15_22_05.810846", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T15-22-05.810846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T15-22-05.810846.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_02T15_22_05.810846", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T15-22-05.810846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T15-22-05.810846.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_02T15_22_05.810846", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T15-22-05.810846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T15-22-05.810846.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_02T15_22_05.810846", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T15-22-05.810846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T15-22-05.810846.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_02T15_22_05.810846", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T15-22-05.810846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T15-22-05.810846.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_02T15_22_05.810846", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T15-22-05.810846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T15-22-05.810846.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_02T15_22_05.810846", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T15-22-05.810846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T15-22-05.810846.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_02T15_22_05.810846", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T15-22-05.810846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T15-22-05.810846.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_02T15_22_05.810846", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T15-22-05.810846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T15-22-05.810846.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_02T15_22_05.810846", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T15-22-05.810846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T15-22-05.810846.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": 
"2024_02_02T15_22_05.810846", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T15-22-05.810846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T15-22-05.810846.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_02T15_22_05.810846", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T15-22-05.810846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T15-22-05.810846.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_02T15_22_05.810846", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T15-22-05.810846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T15-22-05.810846.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_02T15_22_05.810846", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T15-22-05.810846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T15-22-05.810846.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_02T15_22_05.810846", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T15-22-05.810846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T15-22-05.810846.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_02T15_22_05.810846", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T15-22-05.810846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T15-22-05.810846.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_02T15_22_05.810846", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T15-22-05.810846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T15-22-05.810846.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_02T15_22_05.810846", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T15-22-05.810846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T15-22-05.810846.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_02T15_22_05.810846", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T15-22-05.810846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T15-22-05.810846.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_02T15_22_05.810846", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T15-22-05.810846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T15-22-05.810846.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_02T15_22_05.810846", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T15-22-05.810846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T15-22-05.810846.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_02T15_22_05.810846", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T15-22-05.810846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T15-22-05.810846.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_02T15_22_05.810846", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T15-22-05.810846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T15-22-05.810846.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_02T15_22_05.810846", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T15-22-05.810846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T15-22-05.810846.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_02T15_22_05.810846", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T15-22-05.810846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T15-22-05.810846.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_02T15_22_05.810846", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T15-22-05.810846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T15-22-05.810846.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_02T15_22_05.810846", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T15-22-05.810846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T15-22-05.810846.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_02T15_22_05.810846", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T15-22-05.810846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T15-22-05.810846.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_02T15_22_05.810846", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T15-22-05.810846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T15-22-05.810846.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_02T15_22_05.810846", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T15-22-05.810846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T15-22-05.810846.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_02T15_22_05.810846", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T15-22-05.810846.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T15-22-05.810846.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_02T15_22_05.810846", "path": ["**/details_harness|winogrande|5_2024-02-02T15-22-05.810846.parquet"]}, {"split": "latest", "path": 
["**/details_harness|winogrande|5_2024-02-02T15-22-05.810846.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_02_02T15_22_05.810846", "path": ["results_2024-02-02T15-22-05.810846.parquet"]}, {"split": "latest", "path": ["results_2024-02-02T15-22-05.810846.parquet"]}]}]}
2024-02-02T15:24:24+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of Evaloric/Evaloric-1.1B Dataset automatically created during the evaluation run of model Evaloric/Evaloric-1.1B on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-02-02T15:22:05.810846 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of Evaloric/Evaloric-1.1B\n\n\n\nDataset automatically created during the evaluation run of model Evaloric/Evaloric-1.1B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T15:22:05.810846(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of Evaloric/Evaloric-1.1B\n\n\n\nDataset automatically created during the evaluation run of model Evaloric/Evaloric-1.1B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T15:22:05.810846(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
ef35e8c7a01a87a374d6877ad6fe3f0400c3da3b
Conversion of the [THUDM/webglm-qa](https://huggingface.co/datasets/THUDM/webglm-qa) dataset into a plain-text format suitable for pretraining. Python code used for conversion:
```python
from datasets import load_dataset
import pandas
import re

dataset = load_dataset("THUDM/webglm-qa", split="train")

def clean_answer(columns):
    # Strip citation markers such as [1] or [12] from the answer text
    # (markers may span more than one digit, hence \d+).
    return re.sub(r'\[\d+\]', '', columns["answer"].strip())

# Keep only the cleaned answer text, one row per example.
pandas.DataFrame({"text": [clean_answer(columns) for columns in dataset]}).to_csv("train.csv", index=False)
```
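As a quick sanity check on the regex, an answer like `The sky appears blue[1].` comes out as `The sky appears blue.`. A minimal sketch of loading the resulting file back for pretraining, assuming `train.csv` sits in the working directory:
```python
from datasets import load_dataset

# Read the converted CSV back as a single-column text dataset.
dataset = load_dataset("csv", data_files="train.csv", split="train")
print(dataset[0]["text"])  # first cleaned answer
```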
Felladrin/pretrain-webglm-qa
[ "source_datasets:THUDM/webglm-qa", "language:en", "license:apache-2.0", "region:us" ]
2024-02-02T15:25:27+00:00
{"language": ["en"], "license": "apache-2.0", "source_datasets": ["THUDM/webglm-qa"]}
2024-02-09T08:01:09+00:00
[]
[ "en" ]
TAGS #source_datasets-THUDM/webglm-qa #language-English #license-apache-2.0 #region-us
Conversion of the THUDM/webglm-qa dataset into a plain-text format suitable for pretraining. Python code used for conversion:
[]
[ "TAGS\n#source_datasets-THUDM/webglm-qa #language-English #license-apache-2.0 #region-us \n" ]
72b0b09beec3f3af2c9cbc82ed5c44134deac8d2
# Dataset Card for Evaluation run of Sharathhebbar24/code_gpt2_mini_model <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [Sharathhebbar24/code_gpt2_mini_model](https://huggingface.co/Sharathhebbar24/code_gpt2_mini_model) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_Sharathhebbar24__code_gpt2_mini_model",
    "harness_winogrande_5",
    split="train")
```
## Latest results These are the [latest results from run 2024-02-02T15:41:13.540952](https://huggingface.co/datasets/open-llm-leaderboard/details_Sharathhebbar24__code_gpt2_mini_model/blob/main/results_2024-02-02T15-41-13.540952.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.24939264685687307, "acc_stderr": 0.030508768932231183, "acc_norm": 0.25044580537440064, "acc_norm_stderr": 0.03132454191182828, "mc1": 0.2423500611995104, "mc1_stderr": 0.01500067437357034, "mc2": 0.39863932434367527, "mc2_stderr": 0.01509297997669473 }, "harness|arc:challenge|25": { "acc": 0.18600682593856654, "acc_stderr": 0.01137094018326675, "acc_norm": 0.23720136518771331, "acc_norm_stderr": 0.01243039982926085 }, "harness|hellaswag|10": { "acc": 0.28888667596096396, "acc_stderr": 0.004523188431142895, "acc_norm": 0.31248755228042224, "acc_norm_stderr": 0.0046256009167749855 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.22, "acc_stderr": 0.04163331998932268, "acc_norm": 0.22, "acc_norm_stderr": 0.04163331998932268 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.3333333333333333, "acc_stderr": 0.04072314811876837, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.04072314811876837 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.19736842105263158, "acc_stderr": 0.03238981601699397, "acc_norm": 0.19736842105263158, "acc_norm_stderr": 0.03238981601699397 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.24, "acc_stderr": 0.042923469599092816, "acc_norm": 0.24, "acc_norm_stderr": 0.042923469599092816 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.23018867924528302, "acc_stderr": 0.025907897122408173, "acc_norm": 0.23018867924528302, "acc_norm_stderr": 0.025907897122408173 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.2569444444444444, "acc_stderr": 0.03653946969442099, "acc_norm": 0.2569444444444444, "acc_norm_stderr": 0.03653946969442099 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.2, "acc_stderr": 0.04020151261036845, "acc_norm": 0.2, "acc_norm_stderr": 0.04020151261036845 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.34, "acc_stderr": 0.04760952285695236,
"acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695236 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.23, "acc_stderr": 0.042295258468165065, "acc_norm": 0.23, "acc_norm_stderr": 0.042295258468165065 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.2658959537572254, "acc_stderr": 0.03368762932259431, "acc_norm": 0.2658959537572254, "acc_norm_stderr": 0.03368762932259431 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.21568627450980393, "acc_stderr": 0.04092563958237654, "acc_norm": 0.21568627450980393, "acc_norm_stderr": 0.04092563958237654 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.2680851063829787, "acc_stderr": 0.028957342788342343, "acc_norm": 0.2680851063829787, "acc_norm_stderr": 0.028957342788342343 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.2631578947368421, "acc_stderr": 0.04142439719489362, "acc_norm": 0.2631578947368421, "acc_norm_stderr": 0.04142439719489362 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.22758620689655173, "acc_stderr": 0.03493950380131184, "acc_norm": 0.22758620689655173, "acc_norm_stderr": 0.03493950380131184 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.2566137566137566, "acc_stderr": 0.022494510767503154, "acc_norm": 0.2566137566137566, "acc_norm_stderr": 0.022494510767503154 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.15079365079365079, "acc_stderr": 0.03200686497287392, "acc_norm": 0.15079365079365079, "acc_norm_stderr": 0.03200686497287392 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.15, "acc_stderr": 0.0358870281282637, "acc_norm": 0.15, "acc_norm_stderr": 0.0358870281282637 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.23225806451612904, "acc_stderr": 0.02402225613030824, "acc_norm": 0.23225806451612904, "acc_norm_stderr": 0.02402225613030824 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.2660098522167488, "acc_stderr": 0.03108982600293752, "acc_norm": 0.2660098522167488, "acc_norm_stderr": 0.03108982600293752 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.24, "acc_stderr": 0.04292346959909283, "acc_norm": 0.24, "acc_norm_stderr": 0.04292346959909283 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.23636363636363636, "acc_stderr": 0.03317505930009179, "acc_norm": 0.23636363636363636, "acc_norm_stderr": 0.03317505930009179 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.3383838383838384, "acc_stderr": 0.033711241426263014, "acc_norm": 0.3383838383838384, "acc_norm_stderr": 0.033711241426263014 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.23834196891191708, "acc_stderr": 0.030748905363909902, "acc_norm": 0.23834196891191708, "acc_norm_stderr": 0.030748905363909902 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.2205128205128205, "acc_stderr": 0.02102067268082791, "acc_norm": 0.2205128205128205, "acc_norm_stderr": 0.02102067268082791 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.23333333333333334, "acc_stderr": 0.025787874220959316, "acc_norm": 0.23333333333333334, "acc_norm_stderr": 0.025787874220959316 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.22268907563025211, "acc_stderr": 0.027025433498882364, "acc_norm": 0.22268907563025211, "acc_norm_stderr": 0.027025433498882364 }, 
"harness|hendrycksTest-high_school_physics|5": { "acc": 0.2582781456953642, "acc_stderr": 0.035737053147634576, "acc_norm": 0.2582781456953642, "acc_norm_stderr": 0.035737053147634576 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.3229357798165138, "acc_stderr": 0.02004811592341533, "acc_norm": 0.3229357798165138, "acc_norm_stderr": 0.02004811592341533 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.2361111111111111, "acc_stderr": 0.028963702570791037, "acc_norm": 0.2361111111111111, "acc_norm_stderr": 0.028963702570791037 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.24509803921568626, "acc_stderr": 0.03019028245350194, "acc_norm": 0.24509803921568626, "acc_norm_stderr": 0.03019028245350194 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.23628691983122363, "acc_stderr": 0.027652153144159253, "acc_norm": 0.23628691983122363, "acc_norm_stderr": 0.027652153144159253 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.26905829596412556, "acc_stderr": 0.029763779406874972, "acc_norm": 0.26905829596412556, "acc_norm_stderr": 0.029763779406874972 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.21374045801526717, "acc_stderr": 0.0359546161177469, "acc_norm": 0.21374045801526717, "acc_norm_stderr": 0.0359546161177469 }, "harness|hendrycksTest-international_law|5": { "acc": 0.371900826446281, "acc_stderr": 0.044120158066245044, "acc_norm": 0.371900826446281, "acc_norm_stderr": 0.044120158066245044 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.25925925925925924, "acc_stderr": 0.042365112580946336, "acc_norm": 0.25925925925925924, "acc_norm_stderr": 0.042365112580946336 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.3006134969325153, "acc_stderr": 0.03602511318806771, "acc_norm": 0.3006134969325153, "acc_norm_stderr": 0.03602511318806771 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.21428571428571427, "acc_stderr": 0.03894641120044793, "acc_norm": 0.21428571428571427, "acc_norm_stderr": 0.03894641120044793 }, "harness|hendrycksTest-management|5": { "acc": 0.17475728155339806, "acc_stderr": 0.037601780060266224, "acc_norm": 0.17475728155339806, "acc_norm_stderr": 0.037601780060266224 }, "harness|hendrycksTest-marketing|5": { "acc": 0.29914529914529914, "acc_stderr": 0.029996951858349497, "acc_norm": 0.29914529914529914, "acc_norm_stderr": 0.029996951858349497 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.26, "acc_stderr": 0.0440844002276808, "acc_norm": 0.26, "acc_norm_stderr": 0.0440844002276808 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.2503192848020434, "acc_stderr": 0.015491088951494581, "acc_norm": 0.2503192848020434, "acc_norm_stderr": 0.015491088951494581 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.2254335260115607, "acc_stderr": 0.022497230190967547, "acc_norm": 0.2254335260115607, "acc_norm_stderr": 0.022497230190967547 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.2424581005586592, "acc_stderr": 0.014333522059217889, "acc_norm": 0.2424581005586592, "acc_norm_stderr": 0.014333522059217889 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.25163398692810457, "acc_stderr": 0.024848018263875195, "acc_norm": 0.25163398692810457, "acc_norm_stderr": 0.024848018263875195 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.19935691318327975, "acc_stderr": 0.022691033780549656, "acc_norm": 0.19935691318327975, "acc_norm_stderr": 0.022691033780549656 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.25925925925925924, "acc_stderr": 
0.024383665531035457, "acc_norm": 0.25925925925925924, "acc_norm_stderr": 0.024383665531035457 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.2872340425531915, "acc_stderr": 0.02699219917306436, "acc_norm": 0.2872340425531915, "acc_norm_stderr": 0.02699219917306436 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.24837027379400262, "acc_stderr": 0.011035212598034503, "acc_norm": 0.24837027379400262, "acc_norm_stderr": 0.011035212598034503 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.33088235294117646, "acc_stderr": 0.02858270975389844, "acc_norm": 0.33088235294117646, "acc_norm_stderr": 0.02858270975389844 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.24836601307189543, "acc_stderr": 0.017479487001364764, "acc_norm": 0.24836601307189543, "acc_norm_stderr": 0.017479487001364764 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.18181818181818182, "acc_stderr": 0.036942843353378, "acc_norm": 0.18181818181818182, "acc_norm_stderr": 0.036942843353378 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.23265306122448978, "acc_stderr": 0.02704925791589618, "acc_norm": 0.23265306122448978, "acc_norm_stderr": 0.02704925791589618 }, "harness|hendrycksTest-sociology|5": { "acc": 0.2537313432835821, "acc_stderr": 0.03076944496729601, "acc_norm": 0.2537313432835821, "acc_norm_stderr": 0.03076944496729601 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-virology|5": { "acc": 0.18674698795180722, "acc_stderr": 0.030338749144500597, "acc_norm": 0.18674698795180722, "acc_norm_stderr": 0.030338749144500597 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.3333333333333333, "acc_stderr": 0.03615507630310935, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.03615507630310935 }, "harness|truthfulqa:mc|0": { "mc1": 0.2423500611995104, "mc1_stderr": 0.01500067437357034, "mc2": 0.39863932434367527, "mc2_stderr": 0.01509297997669473 }, "harness|winogrande|5": { "acc": 0.5114443567482242, "acc_stderr": 0.014048804199859332 }, "harness|gsm8k|5": { "acc": 0.0, "acc_stderr": 0.0 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. 
--> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
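## Usage Example

As a concrete follow-up to the loading snippet above, here is a minimal sketch that pulls the aggregated "results" configuration for this run. The repository, configuration, and split names come from this card's own metadata; the exact field layout of each row is an assumption, which is why the sketch prints the schema rather than relying on any particular column.

```python
from datasets import load_dataset

# Aggregated metrics for the run; the "latest" split always tracks the newest evaluation.
results = load_dataset(
    "open-llm-leaderboard/details_Sharathhebbar24__code_gpt2_mini_model",
    "results",
    split="latest",
)

# Inspect the available fields before relying on any of them (the row layout is an assumption).
print(results.column_names)
print(results[0])
```

Per-task details work the same way: replace "results" with one of the 63 task configurations listed in the metadata (for example "harness_hendrycksTest_college_computer_science_5") to get the row-level records behind the summary numbers above.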
open-llm-leaderboard/details_Sharathhebbar24__code_gpt2_mini_model
[ "region:us" ]
2024-02-02T15:42:34+00:00
{"pretty_name": "Evaluation run of Sharathhebbar24/code_gpt2_mini_model", "dataset_summary": "Dataset automatically created during the evaluation run of model [Sharathhebbar24/code_gpt2_mini_model](https://huggingface.co/Sharathhebbar24/code_gpt2_mini_model) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Sharathhebbar24__code_gpt2_mini_model\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-02T15:41:13.540952](https://huggingface.co/datasets/open-llm-leaderboard/details_Sharathhebbar24__code_gpt2_mini_model/blob/main/results_2024-02-02T15-41-13.540952.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.24939264685687307,\n \"acc_stderr\": 0.030508768932231183,\n \"acc_norm\": 0.25044580537440064,\n \"acc_norm_stderr\": 0.03132454191182828,\n \"mc1\": 0.2423500611995104,\n \"mc1_stderr\": 0.01500067437357034,\n \"mc2\": 0.39863932434367527,\n \"mc2_stderr\": 0.01509297997669473\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.18600682593856654,\n \"acc_stderr\": 0.01137094018326675,\n \"acc_norm\": 0.23720136518771331,\n \"acc_norm_stderr\": 0.01243039982926085\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.28888667596096396,\n \"acc_stderr\": 0.004523188431142895,\n \"acc_norm\": 0.31248755228042224,\n \"acc_norm_stderr\": 0.0046256009167749855\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.22,\n \"acc_stderr\": 0.04163331998932268,\n \"acc_norm\": 0.22,\n \"acc_norm_stderr\": 0.04163331998932268\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.3333333333333333,\n \"acc_stderr\": 0.04072314811876837,\n \"acc_norm\": 0.3333333333333333,\n \"acc_norm_stderr\": 0.04072314811876837\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.19736842105263158,\n \"acc_stderr\": 0.03238981601699397,\n \"acc_norm\": 0.19736842105263158,\n \"acc_norm_stderr\": 0.03238981601699397\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.24,\n \"acc_stderr\": 0.042923469599092816,\n \"acc_norm\": 0.24,\n \"acc_norm_stderr\": 0.042923469599092816\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.23018867924528302,\n \"acc_stderr\": 0.025907897122408173,\n \"acc_norm\": 0.23018867924528302,\n \"acc_norm_stderr\": 0.025907897122408173\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.2569444444444444,\n \"acc_stderr\": 0.03653946969442099,\n \"acc_norm\": 0.2569444444444444,\n \"acc_norm_stderr\": 0.03653946969442099\n },\n 
\"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.2,\n \"acc_stderr\": 0.04020151261036845,\n \"acc_norm\": 0.2,\n \"acc_norm_stderr\": 0.04020151261036845\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695236,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695236\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.23,\n \"acc_stderr\": 0.042295258468165065,\n \"acc_norm\": 0.23,\n \"acc_norm_stderr\": 0.042295258468165065\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.2658959537572254,\n \"acc_stderr\": 0.03368762932259431,\n \"acc_norm\": 0.2658959537572254,\n \"acc_norm_stderr\": 0.03368762932259431\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.21568627450980393,\n \"acc_stderr\": 0.04092563958237654,\n \"acc_norm\": 0.21568627450980393,\n \"acc_norm_stderr\": 0.04092563958237654\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.2680851063829787,\n \"acc_stderr\": 0.028957342788342343,\n \"acc_norm\": 0.2680851063829787,\n \"acc_norm_stderr\": 0.028957342788342343\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2631578947368421,\n \"acc_stderr\": 0.04142439719489362,\n \"acc_norm\": 0.2631578947368421,\n \"acc_norm_stderr\": 0.04142439719489362\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.22758620689655173,\n \"acc_stderr\": 0.03493950380131184,\n \"acc_norm\": 0.22758620689655173,\n \"acc_norm_stderr\": 0.03493950380131184\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.2566137566137566,\n \"acc_stderr\": 0.022494510767503154,\n \"acc_norm\": 0.2566137566137566,\n \"acc_norm_stderr\": 0.022494510767503154\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.15079365079365079,\n \"acc_stderr\": 0.03200686497287392,\n \"acc_norm\": 0.15079365079365079,\n \"acc_norm_stderr\": 0.03200686497287392\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.15,\n \"acc_stderr\": 0.0358870281282637,\n \"acc_norm\": 0.15,\n \"acc_norm_stderr\": 0.0358870281282637\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.23225806451612904,\n \"acc_stderr\": 0.02402225613030824,\n \"acc_norm\": 0.23225806451612904,\n \"acc_norm_stderr\": 0.02402225613030824\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.2660098522167488,\n \"acc_stderr\": 0.03108982600293752,\n \"acc_norm\": 0.2660098522167488,\n \"acc_norm_stderr\": 0.03108982600293752\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.24,\n \"acc_stderr\": 0.04292346959909283,\n \"acc_norm\": 0.24,\n \"acc_norm_stderr\": 0.04292346959909283\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.23636363636363636,\n \"acc_stderr\": 0.03317505930009179,\n \"acc_norm\": 0.23636363636363636,\n \"acc_norm_stderr\": 0.03317505930009179\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.3383838383838384,\n \"acc_stderr\": 0.033711241426263014,\n \"acc_norm\": 0.3383838383838384,\n \"acc_norm_stderr\": 0.033711241426263014\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.23834196891191708,\n \"acc_stderr\": 0.030748905363909902,\n \"acc_norm\": 0.23834196891191708,\n 
\"acc_norm_stderr\": 0.030748905363909902\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.2205128205128205,\n \"acc_stderr\": 0.02102067268082791,\n \"acc_norm\": 0.2205128205128205,\n \"acc_norm_stderr\": 0.02102067268082791\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.23333333333333334,\n \"acc_stderr\": 0.025787874220959316,\n \"acc_norm\": 0.23333333333333334,\n \"acc_norm_stderr\": 0.025787874220959316\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.22268907563025211,\n \"acc_stderr\": 0.027025433498882364,\n \"acc_norm\": 0.22268907563025211,\n \"acc_norm_stderr\": 0.027025433498882364\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.2582781456953642,\n \"acc_stderr\": 0.035737053147634576,\n \"acc_norm\": 0.2582781456953642,\n \"acc_norm_stderr\": 0.035737053147634576\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.3229357798165138,\n \"acc_stderr\": 0.02004811592341533,\n \"acc_norm\": 0.3229357798165138,\n \"acc_norm_stderr\": 0.02004811592341533\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.2361111111111111,\n \"acc_stderr\": 0.028963702570791037,\n \"acc_norm\": 0.2361111111111111,\n \"acc_norm_stderr\": 0.028963702570791037\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.24509803921568626,\n \"acc_stderr\": 0.03019028245350194,\n \"acc_norm\": 0.24509803921568626,\n \"acc_norm_stderr\": 0.03019028245350194\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.23628691983122363,\n \"acc_stderr\": 0.027652153144159253,\n \"acc_norm\": 0.23628691983122363,\n \"acc_norm_stderr\": 0.027652153144159253\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.26905829596412556,\n \"acc_stderr\": 0.029763779406874972,\n \"acc_norm\": 0.26905829596412556,\n \"acc_norm_stderr\": 0.029763779406874972\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.21374045801526717,\n \"acc_stderr\": 0.0359546161177469,\n \"acc_norm\": 0.21374045801526717,\n \"acc_norm_stderr\": 0.0359546161177469\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.371900826446281,\n \"acc_stderr\": 0.044120158066245044,\n \"acc_norm\": 0.371900826446281,\n \"acc_norm_stderr\": 0.044120158066245044\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.25925925925925924,\n \"acc_stderr\": 0.042365112580946336,\n \"acc_norm\": 0.25925925925925924,\n \"acc_norm_stderr\": 0.042365112580946336\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.3006134969325153,\n \"acc_stderr\": 0.03602511318806771,\n \"acc_norm\": 0.3006134969325153,\n \"acc_norm_stderr\": 0.03602511318806771\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.21428571428571427,\n \"acc_stderr\": 0.03894641120044793,\n \"acc_norm\": 0.21428571428571427,\n \"acc_norm_stderr\": 0.03894641120044793\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.17475728155339806,\n \"acc_stderr\": 0.037601780060266224,\n \"acc_norm\": 0.17475728155339806,\n \"acc_norm_stderr\": 0.037601780060266224\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.29914529914529914,\n \"acc_stderr\": 0.029996951858349497,\n \"acc_norm\": 0.29914529914529914,\n \"acc_norm_stderr\": 0.029996951858349497\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.26,\n \"acc_stderr\": 0.0440844002276808,\n \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.0440844002276808\n 
},\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.2503192848020434,\n \"acc_stderr\": 0.015491088951494581,\n \"acc_norm\": 0.2503192848020434,\n \"acc_norm_stderr\": 0.015491088951494581\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.2254335260115607,\n \"acc_stderr\": 0.022497230190967547,\n \"acc_norm\": 0.2254335260115607,\n \"acc_norm_stderr\": 0.022497230190967547\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.2424581005586592,\n \"acc_stderr\": 0.014333522059217889,\n \"acc_norm\": 0.2424581005586592,\n \"acc_norm_stderr\": 0.014333522059217889\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.25163398692810457,\n \"acc_stderr\": 0.024848018263875195,\n \"acc_norm\": 0.25163398692810457,\n \"acc_norm_stderr\": 0.024848018263875195\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.19935691318327975,\n \"acc_stderr\": 0.022691033780549656,\n \"acc_norm\": 0.19935691318327975,\n \"acc_norm_stderr\": 0.022691033780549656\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.25925925925925924,\n \"acc_stderr\": 0.024383665531035457,\n \"acc_norm\": 0.25925925925925924,\n \"acc_norm_stderr\": 0.024383665531035457\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.2872340425531915,\n \"acc_stderr\": 0.02699219917306436,\n \"acc_norm\": 0.2872340425531915,\n \"acc_norm_stderr\": 0.02699219917306436\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.24837027379400262,\n \"acc_stderr\": 0.011035212598034503,\n \"acc_norm\": 0.24837027379400262,\n \"acc_norm_stderr\": 0.011035212598034503\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.33088235294117646,\n \"acc_stderr\": 0.02858270975389844,\n \"acc_norm\": 0.33088235294117646,\n \"acc_norm_stderr\": 0.02858270975389844\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.24836601307189543,\n \"acc_stderr\": 0.017479487001364764,\n \"acc_norm\": 0.24836601307189543,\n \"acc_norm_stderr\": 0.017479487001364764\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.18181818181818182,\n \"acc_stderr\": 0.036942843353378,\n \"acc_norm\": 0.18181818181818182,\n \"acc_norm_stderr\": 0.036942843353378\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.23265306122448978,\n \"acc_stderr\": 0.02704925791589618,\n \"acc_norm\": 0.23265306122448978,\n \"acc_norm_stderr\": 0.02704925791589618\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.2537313432835821,\n \"acc_stderr\": 0.03076944496729601,\n \"acc_norm\": 0.2537313432835821,\n \"acc_norm_stderr\": 0.03076944496729601\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.18674698795180722,\n \"acc_stderr\": 0.030338749144500597,\n \"acc_norm\": 0.18674698795180722,\n \"acc_norm_stderr\": 0.030338749144500597\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.3333333333333333,\n \"acc_stderr\": 0.03615507630310935,\n \"acc_norm\": 0.3333333333333333,\n \"acc_norm_stderr\": 0.03615507630310935\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.2423500611995104,\n \"mc1_stderr\": 0.01500067437357034,\n \"mc2\": 0.39863932434367527,\n \"mc2_stderr\": 0.01509297997669473\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.5114443567482242,\n \"acc_stderr\": 0.014048804199859332\n },\n 
\"harness|gsm8k|5\": {\n \"acc\": 0.0,\n \"acc_stderr\": 0.0\n }\n}\n```", "repo_url": "https://huggingface.co/Sharathhebbar24/code_gpt2_mini_model", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_02T15_41_13.540952", "path": ["**/details_harness|arc:challenge|25_2024-02-02T15-41-13.540952.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-02T15-41-13.540952.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_02T15_41_13.540952", "path": ["**/details_harness|gsm8k|5_2024-02-02T15-41-13.540952.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-02T15-41-13.540952.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_02T15_41_13.540952", "path": ["**/details_harness|hellaswag|10_2024-02-02T15-41-13.540952.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-02T15-41-13.540952.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_02T15_41_13.540952", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T15-41-13.540952.parquet", 
"**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T15-41-13.540952.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T15-41-13.540952.parquet", 
"**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T15-41-13.540952.parquet", 
"**/details_harness|hendrycksTest-nutrition|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T15-41-13.540952.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T15-41-13.540952.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_02T15_41_13.540952", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T15-41-13.540952.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T15-41-13.540952.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_02T15_41_13.540952", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T15-41-13.540952.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T15-41-13.540952.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_02T15_41_13.540952", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T15-41-13.540952.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T15-41-13.540952.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_02T15_41_13.540952", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T15-41-13.540952.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T15-41-13.540952.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_02T15_41_13.540952", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T15-41-13.540952.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T15-41-13.540952.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_02T15_41_13.540952", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T15-41-13.540952.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T15-41-13.540952.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_02T15_41_13.540952", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T15-41-13.540952.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T15-41-13.540952.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", 
"data_files": [{"split": "2024_02_02T15_41_13.540952", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T15-41-13.540952.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T15-41-13.540952.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_02T15_41_13.540952", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T15-41-13.540952.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T15-41-13.540952.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_02T15_41_13.540952", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T15-41-13.540952.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T15-41-13.540952.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_02T15_41_13.540952", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T15-41-13.540952.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T15-41-13.540952.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_02T15_41_13.540952", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T15-41-13.540952.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T15-41-13.540952.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_02T15_41_13.540952", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T15-41-13.540952.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T15-41-13.540952.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_02T15_41_13.540952", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T15-41-13.540952.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T15-41-13.540952.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_02T15_41_13.540952", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T15-41-13.540952.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T15-41-13.540952.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_02T15_41_13.540952", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T15-41-13.540952.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T15-41-13.540952.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_02T15_41_13.540952", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T15-41-13.540952.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T15-41-13.540952.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_02T15_41_13.540952", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T15-41-13.540952.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T15-41-13.540952.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_02T15_41_13.540952", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T15-41-13.540952.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T15-41-13.540952.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_02T15_41_13.540952", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T15-41-13.540952.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T15-41-13.540952.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_02T15_41_13.540952", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T15-41-13.540952.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T15-41-13.540952.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_02T15_41_13.540952", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T15-41-13.540952.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T15-41-13.540952.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_02T15_41_13.540952", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T15-41-13.540952.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T15-41-13.540952.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_02T15_41_13.540952", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T15-41-13.540952.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T15-41-13.540952.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_02T15_41_13.540952", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T15-41-13.540952.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T15-41-13.540952.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_02T15_41_13.540952", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T15-41-13.540952.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T15-41-13.540952.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_02T15_41_13.540952", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T15-41-13.540952.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T15-41-13.540952.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_02T15_41_13.540952", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T15-41-13.540952.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T15-41-13.540952.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_02T15_41_13.540952", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T15-41-13.540952.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T15-41-13.540952.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_02T15_41_13.540952", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T15-41-13.540952.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T15-41-13.540952.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_02T15_41_13.540952", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T15-41-13.540952.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T15-41-13.540952.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_02T15_41_13.540952", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T15-41-13.540952.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T15-41-13.540952.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_02T15_41_13.540952", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T15-41-13.540952.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T15-41-13.540952.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_02T15_41_13.540952", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T15-41-13.540952.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T15-41-13.540952.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_02T15_41_13.540952", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T15-41-13.540952.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T15-41-13.540952.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_02T15_41_13.540952", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T15-41-13.540952.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T15-41-13.540952.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_02T15_41_13.540952", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T15-41-13.540952.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T15-41-13.540952.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": 
"2024_02_02T15_41_13.540952", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T15-41-13.540952.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T15-41-13.540952.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_02T15_41_13.540952", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T15-41-13.540952.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T15-41-13.540952.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_02T15_41_13.540952", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T15-41-13.540952.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T15-41-13.540952.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_02T15_41_13.540952", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T15-41-13.540952.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T15-41-13.540952.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_02T15_41_13.540952", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T15-41-13.540952.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T15-41-13.540952.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_02T15_41_13.540952", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T15-41-13.540952.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T15-41-13.540952.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_02T15_41_13.540952", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T15-41-13.540952.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T15-41-13.540952.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_02T15_41_13.540952", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T15-41-13.540952.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T15-41-13.540952.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_02T15_41_13.540952", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T15-41-13.540952.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T15-41-13.540952.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_02T15_41_13.540952", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T15-41-13.540952.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T15-41-13.540952.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_02T15_41_13.540952", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T15-41-13.540952.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T15-41-13.540952.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_02T15_41_13.540952", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T15-41-13.540952.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T15-41-13.540952.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_02T15_41_13.540952", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T15-41-13.540952.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T15-41-13.540952.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_02T15_41_13.540952", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T15-41-13.540952.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T15-41-13.540952.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_02T15_41_13.540952", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T15-41-13.540952.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T15-41-13.540952.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_02T15_41_13.540952", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T15-41-13.540952.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T15-41-13.540952.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_02T15_41_13.540952", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T15-41-13.540952.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T15-41-13.540952.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_02T15_41_13.540952", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T15-41-13.540952.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T15-41-13.540952.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_02T15_41_13.540952", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T15-41-13.540952.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T15-41-13.540952.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_02T15_41_13.540952", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T15-41-13.540952.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T15-41-13.540952.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_02T15_41_13.540952", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T15-41-13.540952.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T15-41-13.540952.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_02T15_41_13.540952", "path": ["**/details_harness|winogrande|5_2024-02-02T15-41-13.540952.parquet"]}, {"split": "latest", "path": 
["**/details_harness|winogrande|5_2024-02-02T15-41-13.540952.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_02_02T15_41_13.540952", "path": ["results_2024-02-02T15-41-13.540952.parquet"]}, {"split": "latest", "path": ["results_2024-02-02T15-41-13.540952.parquet"]}]}]}
2024-02-02T15:42:58+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of Sharathhebbar24/code_gpt2_mini_model Dataset automatically created during the evaluation run of model Sharathhebbar24/code_gpt2_mini_model on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-02-02T15:41:13.540952 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of Sharathhebbar24/code_gpt2_mini_model\n\n\n\nDataset automatically created during the evaluation run of model Sharathhebbar24/code_gpt2_mini_model on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T15:41:13.540952(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of Sharathhebbar24/code_gpt2_mini_model\n\n\n\nDataset automatically created during the evaluation run of model Sharathhebbar24/code_gpt2_mini_model on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T15:41:13.540952(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
edf6bbd219cf0709757f547e944feb0e61a6b605
# Dataset Card for "Wikipedia-lt" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
gintareP/Wikipedia-lt
[ "region:us" ]
2024-02-02T16:15:11+00:00
{"dataset_info": {"features": [{"name": "Title", "dtype": "string"}, {"name": "Article", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 244686414, "num_examples": 727740}], "download_size": 146211953, "dataset_size": 244686414}}
2024-02-02T16:19:26+00:00
[]
[]
TAGS #region-us
# Dataset Card for "Wikipedia-lt" More Information needed
[ "# Dataset Card for \"Wikipedia-lt\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"Wikipedia-lt\"\n\nMore Information needed" ]
ade023b7aefa0aef41f98e4ed1bafd30b5274b4c
# Dataset Card for "lmind_hotpot_train1000_eval500_v1_qa" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
tyzhu/lmind_hotpot_train1000_eval500_v1_qa
[ "region:us" ]
2024-02-02T16:20:14+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train_qa", "path": "data/train_qa-*"}, {"split": "train_recite_qa", "path": "data/train_recite_qa-*"}, {"split": "eval_qa", "path": "data/eval_qa-*"}, {"split": "eval_recite_qa", "path": "data/eval_recite_qa-*"}, {"split": "all_docs", "path": "data/all_docs-*"}, {"split": "all_docs_eval", "path": "data/all_docs_eval-*"}, {"split": "train", "path": "data/train-*"}, {"split": "validation", "path": "data/validation-*"}]}], "dataset_info": {"features": [{"name": "inputs", "dtype": "string"}, {"name": "targets", "dtype": "string"}, {"name": "answers", "struct": [{"name": "answer_start", "sequence": "null"}, {"name": "text", "sequence": "string"}]}], "splits": [{"name": "train_qa", "num_bytes": 173266, "num_examples": 1000}, {"name": "train_recite_qa", "num_bytes": 1052784, "num_examples": 1000}, {"name": "eval_qa", "num_bytes": 81677, "num_examples": 500}, {"name": "eval_recite_qa", "num_bytes": 542914, "num_examples": 500}, {"name": "all_docs", "num_bytes": 1370698, "num_examples": 2959}, {"name": "all_docs_eval", "num_bytes": 1370509, "num_examples": 2959}, {"name": "train", "num_bytes": 173266, "num_examples": 1000}, {"name": "validation", "num_bytes": 81677, "num_examples": 500}], "download_size": 2985172, "dataset_size": 4846791}}
2024-02-02T16:20:37+00:00
[]
[]
TAGS #region-us
# Dataset Card for "lmind_hotpot_train1000_eval500_v1_qa" More Information needed
[ "# Dataset Card for \"lmind_hotpot_train1000_eval500_v1_qa\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"lmind_hotpot_train1000_eval500_v1_qa\"\n\nMore Information needed" ]
ec344653a645bd46c0991d9c09367d4e921b5f5d
# Dataset Card for "lmind_hotpot_train1000_eval500_v1_doc" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
tyzhu/lmind_hotpot_train1000_eval500_v1_doc
[ "region:us" ]
2024-02-02T16:20:39+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train_qa", "path": "data/train_qa-*"}, {"split": "train_recite_qa", "path": "data/train_recite_qa-*"}, {"split": "eval_qa", "path": "data/eval_qa-*"}, {"split": "eval_recite_qa", "path": "data/eval_recite_qa-*"}, {"split": "all_docs", "path": "data/all_docs-*"}, {"split": "all_docs_eval", "path": "data/all_docs_eval-*"}, {"split": "train", "path": "data/train-*"}, {"split": "validation", "path": "data/validation-*"}]}], "dataset_info": {"features": [{"name": "inputs", "dtype": "string"}, {"name": "targets", "dtype": "string"}, {"name": "answers", "struct": [{"name": "answer_start", "sequence": "null"}, {"name": "text", "sequence": "string"}]}], "splits": [{"name": "train_qa", "num_bytes": 173266, "num_examples": 1000}, {"name": "train_recite_qa", "num_bytes": 1052784, "num_examples": 1000}, {"name": "eval_qa", "num_bytes": 81677, "num_examples": 500}, {"name": "eval_recite_qa", "num_bytes": 542914, "num_examples": 500}, {"name": "all_docs", "num_bytes": 1370698, "num_examples": 2959}, {"name": "all_docs_eval", "num_bytes": 1370509, "num_examples": 2959}, {"name": "train", "num_bytes": 1370698, "num_examples": 2959}, {"name": "validation", "num_bytes": 1370698, "num_examples": 2959}], "download_size": 4478537, "dataset_size": 7333244}}
2024-02-02T16:21:15+00:00
[]
[]
TAGS #region-us
# Dataset Card for "lmind_hotpot_train1000_eval500_v1_doc" More Information needed
[ "# Dataset Card for \"lmind_hotpot_train1000_eval500_v1_doc\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"lmind_hotpot_train1000_eval500_v1_doc\"\n\nMore Information needed" ]
ccee97c9ad6819c19e86053bb4e8c3ea60413d80
# Dataset Card for "lmind_hotpot_train1000_eval500_v1_docidx" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
tyzhu/lmind_hotpot_train1000_eval500_v1_docidx
[ "region:us" ]
2024-02-02T16:21:15+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train_qa", "path": "data/train_qa-*"}, {"split": "train_recite_qa", "path": "data/train_recite_qa-*"}, {"split": "eval_qa", "path": "data/eval_qa-*"}, {"split": "eval_recite_qa", "path": "data/eval_recite_qa-*"}, {"split": "all_docs", "path": "data/all_docs-*"}, {"split": "all_docs_eval", "path": "data/all_docs_eval-*"}, {"split": "train", "path": "data/train-*"}, {"split": "validation", "path": "data/validation-*"}]}], "dataset_info": {"features": [{"name": "inputs", "dtype": "string"}, {"name": "targets", "dtype": "string"}, {"name": "answers", "struct": [{"name": "answer_start", "sequence": "null"}, {"name": "text", "sequence": "string"}]}], "splits": [{"name": "train_qa", "num_bytes": 173266, "num_examples": 1000}, {"name": "train_recite_qa", "num_bytes": 1052784, "num_examples": 1000}, {"name": "eval_qa", "num_bytes": 81677, "num_examples": 500}, {"name": "eval_recite_qa", "num_bytes": 542914, "num_examples": 500}, {"name": "all_docs", "num_bytes": 1370698, "num_examples": 2959}, {"name": "all_docs_eval", "num_bytes": 1370509, "num_examples": 2959}, {"name": "train", "num_bytes": 1370698, "num_examples": 2959}, {"name": "validation", "num_bytes": 1370509, "num_examples": 2959}], "download_size": 4500295, "dataset_size": 7333055}}
2024-02-02T16:21:46+00:00
[]
[]
TAGS #region-us
# Dataset Card for "lmind_hotpot_train1000_eval500_v1_docidx" More Information needed
[ "# Dataset Card for \"lmind_hotpot_train1000_eval500_v1_docidx\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"lmind_hotpot_train1000_eval500_v1_docidx\"\n\nMore Information needed" ]
a882148e1852bca4075bf6d74d3fdb9540367823
# Dataset Card for "lmind_hotpot_train1000_eval500_v1_doc_qa" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
tyzhu/lmind_hotpot_train1000_eval500_v1_doc_qa
[ "region:us" ]
2024-02-02T16:21:49+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train_qa", "path": "data/train_qa-*"}, {"split": "train_recite_qa", "path": "data/train_recite_qa-*"}, {"split": "eval_qa", "path": "data/eval_qa-*"}, {"split": "eval_recite_qa", "path": "data/eval_recite_qa-*"}, {"split": "all_docs", "path": "data/all_docs-*"}, {"split": "all_docs_eval", "path": "data/all_docs_eval-*"}, {"split": "train", "path": "data/train-*"}, {"split": "validation", "path": "data/validation-*"}]}], "dataset_info": {"features": [{"name": "inputs", "dtype": "string"}, {"name": "targets", "dtype": "string"}, {"name": "answers", "struct": [{"name": "answer_start", "sequence": "null"}, {"name": "text", "sequence": "string"}]}], "splits": [{"name": "train_qa", "num_bytes": 173266, "num_examples": 1000}, {"name": "train_recite_qa", "num_bytes": 1052784, "num_examples": 1000}, {"name": "eval_qa", "num_bytes": 81677, "num_examples": 500}, {"name": "eval_recite_qa", "num_bytes": 542914, "num_examples": 500}, {"name": "all_docs", "num_bytes": 1370698, "num_examples": 2959}, {"name": "all_docs_eval", "num_bytes": 1370509, "num_examples": 2959}, {"name": "train", "num_bytes": 1543964, "num_examples": 3959}, {"name": "validation", "num_bytes": 81677, "num_examples": 500}], "download_size": 3811808, "dataset_size": 6217489}}
2024-02-02T16:22:12+00:00
[]
[]
TAGS #region-us
# Dataset Card for "lmind_hotpot_train1000_eval500_v1_doc_qa" More Information needed
[ "# Dataset Card for \"lmind_hotpot_train1000_eval500_v1_doc_qa\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"lmind_hotpot_train1000_eval500_v1_doc_qa\"\n\nMore Information needed" ]
9cebc9710120dff27711e53b7c993643e0453fde
# Dataset Card for "lmind_hotpot_train1000_eval500_v1_recite_qa" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
tyzhu/lmind_hotpot_train1000_eval500_v1_recite_qa
[ "region:us" ]
2024-02-02T16:22:13+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train_qa", "path": "data/train_qa-*"}, {"split": "train_recite_qa", "path": "data/train_recite_qa-*"}, {"split": "eval_qa", "path": "data/eval_qa-*"}, {"split": "eval_recite_qa", "path": "data/eval_recite_qa-*"}, {"split": "all_docs", "path": "data/all_docs-*"}, {"split": "all_docs_eval", "path": "data/all_docs_eval-*"}, {"split": "train", "path": "data/train-*"}, {"split": "validation", "path": "data/validation-*"}]}], "dataset_info": {"features": [{"name": "inputs", "dtype": "string"}, {"name": "targets", "dtype": "string"}, {"name": "answers", "struct": [{"name": "answer_start", "sequence": "null"}, {"name": "text", "sequence": "string"}]}], "splits": [{"name": "train_qa", "num_bytes": 173266, "num_examples": 1000}, {"name": "train_recite_qa", "num_bytes": 1052784, "num_examples": 1000}, {"name": "eval_qa", "num_bytes": 81677, "num_examples": 500}, {"name": "eval_recite_qa", "num_bytes": 542914, "num_examples": 500}, {"name": "all_docs", "num_bytes": 1370698, "num_examples": 2959}, {"name": "all_docs_eval", "num_bytes": 1370509, "num_examples": 2959}, {"name": "train", "num_bytes": 2423482, "num_examples": 3959}, {"name": "validation", "num_bytes": 542914, "num_examples": 500}], "download_size": 4626743, "dataset_size": 7558244}}
2024-02-02T16:22:34+00:00
[]
[]
TAGS #region-us
# Dataset Card for "lmind_hotpot_train1000_eval500_v1_recite_qa" More Information needed
[ "# Dataset Card for \"lmind_hotpot_train1000_eval500_v1_recite_qa\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"lmind_hotpot_train1000_eval500_v1_recite_qa\"\n\nMore Information needed" ]
5c50ffbf872a568888e0fa468933746d6c7589c4
# Dataset Card for "lmind_hotpot_train1000_eval500_v1_reciteonly_qa" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
tyzhu/lmind_hotpot_train1000_eval500_v1_reciteonly_qa
[ "region:us" ]
2024-02-02T16:22:35+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train_qa", "path": "data/train_qa-*"}, {"split": "train_recite_qa", "path": "data/train_recite_qa-*"}, {"split": "eval_qa", "path": "data/eval_qa-*"}, {"split": "eval_recite_qa", "path": "data/eval_recite_qa-*"}, {"split": "all_docs", "path": "data/all_docs-*"}, {"split": "all_docs_eval", "path": "data/all_docs_eval-*"}, {"split": "train", "path": "data/train-*"}, {"split": "validation", "path": "data/validation-*"}]}], "dataset_info": {"features": [{"name": "inputs", "dtype": "string"}, {"name": "targets", "dtype": "string"}, {"name": "answers", "struct": [{"name": "answer_start", "sequence": "null"}, {"name": "text", "sequence": "string"}]}], "splits": [{"name": "train_qa", "num_bytes": 173266, "num_examples": 1000}, {"name": "train_recite_qa", "num_bytes": 1052784, "num_examples": 1000}, {"name": "eval_qa", "num_bytes": 81677, "num_examples": 500}, {"name": "eval_recite_qa", "num_bytes": 542914, "num_examples": 500}, {"name": "all_docs", "num_bytes": 1370698, "num_examples": 2959}, {"name": "all_docs_eval", "num_bytes": 1370509, "num_examples": 2959}, {"name": "train", "num_bytes": 1052784, "num_examples": 1000}, {"name": "validation", "num_bytes": 542914, "num_examples": 500}], "download_size": 3799657, "dataset_size": 6187546}}
2024-02-02T16:23:00+00:00
[]
[]
TAGS #region-us
# Dataset Card for "lmind_hotpot_train1000_eval500_v1_reciteonly_qa" More Information needed
[ "# Dataset Card for \"lmind_hotpot_train1000_eval500_v1_reciteonly_qa\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"lmind_hotpot_train1000_eval500_v1_reciteonly_qa\"\n\nMore Information needed" ]